Nginx 处理除 https://domain.com 之外的所有域请求

Nginx 处理除 https://domain.com 之外的所有域请求

我们的配置如下——在浏览器地址栏输入以下任意一种写法,都能正确到达目标网站(https://www.exampledomain.com):

http://exampledomain.com

http://www.exampledomain.com

www.exampledomain.com

exampledomain.com

https://www.exampledomain.com

以上这些地址都会被转发/解析到 https://www.exampledomain.com。

问题是:当有人输入(或链接到)https://exampledomain.com 时,会收到"连接被拒绝"(connection refused)错误。

    server {
      # Catch all plain-HTTP requests for both the bare and www hostnames
      # and permanently redirect them to the canonical HTTPS site.
      listen 80;
      # Also listen on IPv6: the HTTPS servers below accept [::] connections,
      # so without this line IPv6 clients using plain HTTP would be refused.
      listen [::]:80;
      server_name exampledomain.com www.exampledomain.com;
      return 301 https://www.exampledomain.com$request_uri;
    }

    server {
      # Redirect https://exampledomain.com -> https://www.exampledomain.com.
      # NOTE(review): a "connection refused" on this hostname is typically a
      # DNS problem (bare-domain A record pointing elsewhere), not this block —
      # nginx never sees the connection if DNS sends it to the wrong address.
      listen [::]:443 ssl http2;
      listen 443 ssl http2;
      server_name exampledomain.com;

      # make SSL stronger
      # TLSv1 and TLSv1.1 are formally deprecated (RFC 8996); offer TLSv1.2 only.
      ssl_protocols TLSv1.2;
      ssl_prefer_server_ciphers on;
      #ssl_dhparam /etc/ssl/certs/dhparam.pem;
      ssl_ciphers 'REDACTEDFORSERVERFAULT';
      ssl_session_timeout 1d;
      ssl_session_cache shared:SSL:50m;
      # NOTE(review): ssl_stapling requires a "resolver" directive so nginx can
      # query the OCSP responder; without one stapling is silently disabled.
      ssl_stapling on;
      ssl_stapling_verify on;
      # Quoted so extra tokens (e.g. includeSubDomains) can be added safely later.
      add_header Strict-Transport-Security "max-age=15768000";

      ssl_certificate /etc/letsencrypt/live/exampledomain.com/fullchain.pem;
      ssl_certificate_key /etc/letsencrypt/live/exampledomain.com/privkey.pem;

      return 301 https://www.exampledomain.com$request_uri;
    }

    server {

            # SSL configuration
            #
            listen 443 ssl default_server;
      listen [::]:443 ssl default_server;
            #
            # Note: You should disable gzip for SSL traffic.
            # See: https://bugs.debian.org/773332
            #
            # Read up on ssl_ciphers to ensure a secure configuration.
            # See: https://bugs.debian.org/765782
            #
            # Self signed certs generated by the ssl-cert package
            # Don't use them in a production server!
            #
            # include snippets/snakeoil.conf;

            root /var/www/repo-name-redacted/html;

            # Add index.php to the list if you are using PHP
            index index.php index.html index.htm index.nginx-debian.html;

            server_name www.exampledomain.com;

            # make SSL stronger
            # NOTE(review): TLSv1 and TLSv1.1 are deprecated (RFC 8996);
            # consider restricting this to TLSv1.2 (plus TLSv1.3 if available).
            ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
      ssl_prefer_server_ciphers on;
      #ssl_dhparam /etc/ssl/certs/dhparam.pem;
      ssl_ciphers 'REDACTED-FOR-SERVERFAULT';
      ssl_session_timeout 1d;
      ssl_session_cache shared:SSL:50m;
      # NOTE(review): ssl_stapling needs a "resolver" directive for OCSP
      # lookups; with none configured, nginx disables stapling and logs a warning.
      ssl_stapling on;
      ssl_stapling_verify on;
      add_header Strict-Transport-Security max-age=15768000;

      ssl_certificate /etc/letsencrypt/live/exampledomain.com/fullchain.pem;
      ssl_certificate_key /etc/letsencrypt/live/exampledomain.com/privkey.pem;

            # set max file upload size to 10MB
            client_max_body_size 10M;

      # Enable compression, this will help if you have for instance advagg module
      # by serving Gzip versions of the files.
      gzip_static on;

      # needed by letsencrypt
      # "^~" makes this prefix location take priority over the regex locations
      # below, so the hidden-file deny (location ~ /\.) cannot block
      # /.well-known/acme-challenge/ requests.
      location ^~ /.well-known/ {
        allow all;
      }

      location / {
                    # First attempt to serve request as file, then
                    # as directory, then fall back to displaying a 404.
                    #try_files $uri $uri/ /index.php?$args;

                    error_page   404  =  /404.php;
            }

            #Cache-control far future jazz

      # Far-future caching for static assets; missing files are not logged.
      # NOTE(review): "svg?" matches "sv" or "svg" (the "?" makes the "g"
      # optional) — "svgz?" was probably intended. Confirm and fix the pattern.
      location ~* \.(?:ico|css|js|svg?|gif|jpe?g|png)$ {
         expires 7d;
         log_not_found off;
         add_header Pragma public;
         add_header Cache-Control "public";
      }

            # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000

            location ~ \.php$ {
                    include snippets/fastcgi-php.conf;

            #       # With php5-fpm:
                    # NOTE(review): php5-fpm is end-of-life; verify this socket
                    # path matches the PHP-FPM version actually installed.
                    fastcgi_pass unix:/var/run/php5-fpm.sock;

                    error_page   404  =  /404.php;
            }

      #!!! IMPORTANT !!! We need to hide the password file from prying eyes
      # This will deny access to any hidden file (beginning with a .period)
      location ~ /\. { deny  all; }

      location = /favicon.ico {
        log_not_found off;
        access_log off;
      }

      location = /robots.txt {
        allow all;
        log_not_found off;
        access_log off;
      }

    }

答案1

事实证明这是 DNS 配置错误:裸域(exampledomain.com)的 A 记录指向的 IP 地址与 www 子域的 A 记录不一致,因此 https://exampledomain.com 的请求根本没有到达这台 nginx 服务器。

相关内容