
When I google my domain name, the results show multiple https URLs from my site, because Google favours https. For specific reasons I do not want the https/SSL version indexed. How can I stop Google from indexing my https URLs?

How can I avoid this? Everywhere I look the solution is written for .htaccess, but I am using nginx. The .htaccess solution looks like this:

RewriteEngine On 
RewriteCond %{HTTPS} =on 
RewriteRule ^robots.txt$ robots-deny-all.txt [L] 

Content of the extra robots file (robots-deny-all.txt):

User-agent: * 
Disallow: /

How can I do the same thing with an nginx rewrite rule?

Thank you.

Just an addition (my /etc/nginx/sites-available/somedomain.conf):

server { 
    server_name somedomain.com www.somedomain.com; 
    listen 100.10.10.10; 
    root /home/somedomain/public_html; 
    index index.php index.html index.htm; 
    access_log /var/log/virtualmin/somedomain.com_access_log; 
    error_log /var/log/virtualmin/somedomain.com_error_log; 
    fastcgi_param GATEWAY_INTERFACE CGI/1.1; 
    fastcgi_param SERVER_SOFTWARE nginx; 
    fastcgi_param QUERY_STRING $query_string; 
    fastcgi_param REQUEST_METHOD $request_method; 
    fastcgi_param CONTENT_TYPE $content_type; 
    fastcgi_param CONTENT_LENGTH $content_length; 
    fastcgi_param SCRIPT_FILENAME /home/somedomain/public_html$fastcgi_script_name; 
    fastcgi_param SCRIPT_NAME $fastcgi_script_name; 
    fastcgi_param REQUEST_URI $request_uri; 
    fastcgi_param DOCUMENT_URI $document_uri; 
    fastcgi_param DOCUMENT_ROOT /home/somedomain/public_html; 
    fastcgi_param SERVER_PROTOCOL $server_protocol; 
    fastcgi_param REMOTE_ADDR $remote_addr; 
    fastcgi_param REMOTE_PORT $remote_port; 
    fastcgi_param SERVER_ADDR $server_addr; 
    fastcgi_param SERVER_PORT $server_port; 
    fastcgi_param SERVER_NAME $server_name; 
    fastcgi_param HTTPS $https; 
    location ~ \.php$ {
        try_files $uri =404;
        fastcgi_pass unix:/run/php/php7.0-fpm.sock;
        fastcgi_split_path_info ^(.+\.php)(/.+)$;
        fastcgi_index index.php;
        fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
        include fastcgi_params;
    }
    ##
    # cache client
    ##
    location ~* \.(jpg|jpeg|gif|png|css|js|ico|xml)$ {
        access_log off;
        log_not_found off;
        expires 30d;
    }
    location / {
        # First attempt to serve request as file, then
        # as directory, then fall back to displaying a 404.
        try_files $uri $uri/ /index.php?q=$uri&$args;
        # Uncomment to enable naxsi on this location
        # include /etc/nginx/naxsi.rules
    }
    location = /robots.txt {
        if ($scheme = https) {
            rewrite ^ /robots-deny-all.txt permanent;
        }
    }
    error_page 404 /404.html;
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root /usr/share/nginx/html;
    }
    listen 100.10.10.10:443 ssl; 
    ssl_certificate /home/somedomain/ssl.cert; 
    ssl_certificate_key /home/somedomain/ssl.key; 
    add_header X-Robots-Tag "noindex, nofollow, nosnippet, noarchive"; 
} 

My /etc/nginx/nginx.conf:

user www-data; 
worker_processes auto; 
pid /run/nginx.pid; 

events { 
    worker_connections 1024; 
    # multi_accept on; 
} 

http { 

    ## 
    # Basic Settings 
    ## 

    sendfile on; 
    tcp_nopush on; 
    tcp_nodelay on; 
    keepalive_timeout 65; 
    types_hash_max_size 2048; 
    # server_tokens off; 

    # server_names_hash_bucket_size 64; 
    # server_name_in_redirect off; 

    include /etc/nginx/mime.types; 
    default_type application/octet-stream; 

    ## 
    # SSL Settings 
    ## 

    ssl_protocols TLSv1 TLSv1.1 TLSv1.2; # Dropping SSLv3, ref: POODLE 
    ssl_prefer_server_ciphers on; 

    ## 
    # Logging Settings 
    ## 

    access_log /var/log/nginx/access.log; 
    error_log /var/log/nginx/error.log; 

    ## 
    # Gzip Settings 
    ## 

    gzip on; 
    gzip_disable "msie6"; 

    gzip_vary on; 
    gzip_proxied any; 
    gzip_comp_level 6; 
    gzip_buffers 16 8k; 
    gzip_http_version 1.1; 
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript; 

    ## 
    # Virtual Host Configs 
    ## 

    include /etc/nginx/conf.d/*.conf; 
    include /etc/nginx/sites-enabled/*; 
    server_names_hash_bucket_size 128;
    client_max_body_size 100M;
    client_body_buffer_size 16k;

    fastcgi_buffers 8 16k;
    fastcgi_buffer_size 32k;
    fastcgi_connect_timeout 180;
    fastcgi_send_timeout 180;
    fastcgi_read_timeout 180;
} 


#mail { 
# # See sample authentication script at: 
# # http://wiki.nginx.org/ImapAuthenticateWithApachePhpScript 
# 
# # auth_http localhost/auth.php; 
# # pop3_capabilities "TOP" "USER"; 
# # imap_capabilities "IMAP4rev1" "UIDPLUS"; 
# 
# server { 
#  listen  localhost:110; 
#  protocol pop3; 
#  proxy  on; 
# } 
# 
# server { 
#  listen  localhost:143; 
#  protocol imap; 
#  proxy  on; 
# } 
#} 

Answers

0

In your nginx.conf file you can add the header to the server block that listens on 443 (SSL):

server { 
    listen 443 ssl; 
    ... 

    # This header will prevent search engines from indexing your https pages 
    add_header X-Robots-Tag "noindex, nofollow, nosnippet, noarchive"; 
} 
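One note on the server block shown in the question: it listens on port 80 and on 443 in the same block, so an unconditional add_header would mark the plain-HTTP pages as noindex as well. A minimal sketch of keeping a single block and making the header conditional with a map (the variable name $robots_tag is just one chosen for this example; add_header drops a header whose value ends up empty):

# In the http {} context, e.g. nginx.conf:
# empty value for plain HTTP, the real directives only when $https is "on".
map $https $robots_tag {
    default "";
    on      "noindex, nofollow, nosnippet, noarchive";
}

# In the combined server block from the question:
server {
    listen 100.10.10.10;
    listen 100.10.10.10:443 ssl;
    ...

    # nginx skips the header entirely when $robots_tag is empty
    add_header X-Robots-Tag $robots_tag;
}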
+0

Thank you very much for your reply @borislemke. Sorry about the earlier comment, I accidentally sent it before it was ready and could not fix it within the 5-minute limit. I have added my server block to the question. Could you tell me whether I still need to add listen 443 ssl; at the beginning of the server block, since I already have it near the end? And where exactly do I add the add_header ... line? – hercules

+0

I am confused. First you said to put it in my nginx.conf, then you wrote a server block for me. With my setup, where do I add the add_header X-Rob... line? There is no server block in the nginx.conf file itself; the server blocks live in the per-virtual-host files such as /etc/nginx/sites-available/somedomain.conf. Thanks again. – hercules

+0

When I say 'nginx.conf' I do not necessarily mean that exact file, just your nginx configuration, wherever you keep it. If you use sites-enabled, then that is where it goes. – borislemke
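An alternative that answers the placement question directly, sketched here on the assumption that the virtual host file can be restructured: split the combined block in /etc/nginx/sites-available/somedomain.conf into one server block per scheme, so that listen 443 ssl;, the certificates and the header live only in the HTTPS block:

# Plain-HTTP site: indexable, no X-Robots-Tag header.
server {
    listen 100.10.10.10;
    server_name somedomain.com www.somedomain.com;
    root /home/somedomain/public_html;
    ...
}

# HTTPS site: carries the noindex header.
server {
    listen 100.10.10.10:443 ssl;
    server_name somedomain.com www.somedomain.com;
    root /home/somedomain/public_html;
    ssl_certificate /home/somedomain/ssl.cert;
    ssl_certificate_key /home/somedomain/ssl.key;
    add_header X-Robots-Tag "noindex, nofollow, nosnippet, noarchive";
    ...
}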

0

To do something similar to your .htaccess file, use:

location = /robots.txt {
    if ($scheme = https) {
        rewrite ^ /robots-deny-all.txt permanent;
    }
}

Note this caution on the use of if.
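If the virtual host is split into separate HTTP and HTTPS server blocks as sketched earlier, the if can be avoided altogether: the HTTPS block can answer robots.txt requests itself. A minimal sketch in which the literal response text replaces the separate robots-deny-all.txt file:

# Inside the HTTPS-only server block: serve a deny-all robots.txt
# directly, without an if or an extra file on disk.
location = /robots.txt {
    default_type text/plain;
    return 200 "User-agent: *\nDisallow: /\n";
}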

+0

Thank you very much for your reply @Richard Smith. This problem has really cost me sleep; it is scary when people land on my SSL URLs. As you can see in my server block I added your configuration (please tell me whether I put it in the right place), but I am still testing. I do not yet know how to get the SSL URLs that Google has already indexed removed, or how to tell whether this new configuration will really keep my https URLs out of the index. I only use https to reach my login page, nothing else. – hercules

+0

I cannot mark this as helpful yet because I have fewer than 15 points, but once I have tested it, removed my URLs from Google's cache, and confirmed that only my http URLs are indexed and my https URLs no longer are, I will definitely mark it as solved by your answer, because this is one of the worst problems I have ever run into. – hercules