
Current Path : /usr/local/lib/python2.5/

FreeBSD hs32.drive.ne.jp 9.1-RELEASE FreeBSD 9.1-RELEASE #1: Wed Jan 14 12:18:08 JST 2015 root@hs32.drive.ne.jp:/sys/amd64/compile/hs32 amd64
Current File : /usr/local/lib/python2.5/robotparser.pyc

""" robotparser.py

    Copyright (C) 2000  Bastian Kleineidam

    You can choose between two licenses when using this package:
    1) GNU GPLv2
    2) PSF license for Python 2.2

    The robots.txt Exclusion Protocol is implemented as specified in
    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""
import urlparse
import urllib

__all__ = ["RobotFileParser"]

debug = 0

def _debug(msg):
    if debug: print msg


class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        self.entries = []
        self.default_entry = None
        self.disallow_all = False
        self.allow_all = False
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = []
        line = f.readline()
        while line:
            lines.append(line.strip())
            line = f.readline()
        self.errcode = opener.errcode
        if self.errcode == 401 or self.errcode == 403:
            self.disallow_all = True
            _debug("disallow all")
        elif self.errcode >= 400:
            self.allow_all = True
            _debug("allow all")
        elif self.errcode == 200 and lines:
            _debug("parse lines")
            self.parse(lines)

    def _add_entry(self, entry):
        if "*" in entry.useragents:
            # the default entry is considered last
            self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """parse the input lines from a robots.txt file.
           We allow that a user-agent: line is not preceded by
           one or more blank lines."""
        state = 0
        linenumber = 0
        entry = Entry()
        for line in lines:
            linenumber = linenumber + 1
            if not line:
                if state == 1:
                    _debug("line %d: warning: you should insert allow: or"
                           " disallow: directives below any user-agent:"
                           " line" % linenumber)
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        _debug("line %d: warning: you should insert a blank"
                               " line before any user-agent"
                               " directive" % linenumber)
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state == 0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state == 0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], True))
                else:
                    _debug("line %d: warning: unknown key %s" % (linenumber,
                                                                 line[0]))
            else:
                _debug("line %d: error: malformed line %s" % (linenumber, line))
        if state == 2:
            self.entries.append(entry)
        _debug("Parsed rules:\n%s" % str(self))

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        _debug("Checking robots.txt allowance for:\n  user agent: %s\n  url: %s" %
               (useragent, url))
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def __str__(self):
        ret = ""
        for entry in self.entries:
            ret = ret + str(entry) + "\n"
        return ret


class RuleLine:
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
       (allowance==False) followed by a path."""

    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty value means allow all
            allowance = True
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        return self.path == "*" or filename.startswith(self.path)

    def __str__(self):
        return (self.allowance and "Allow" or "Disallow") + ": " + self.path


class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""

    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        ret = ""
        for agent in self.useragents:
            ret = ret + "User-agent: " + agent + "\n"
        for line in self.rulelines:
            ret = ret + str(line) + "\n"
        return ret

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # split the name token and make it lower case
        useragent = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return True
            agent = agent.lower()
            if agent in useragent:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        for line in self.rulelines:
            _debug((filename, str(line), line.allowance))
            if line.applies_to(filename):
                return line.allowance
        return True


class URLopener(urllib.FancyURLopener):
    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        self.errcode = 200

    def prompt_user_passwd(self, host, realm):
        ## If robots.txt file is accessible, run without locking up
        ## keyboard input (for web spiders)
        return None, None

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(self, url, fp, errcode,
                                                        errmsg, headers)


def _check(a, b):
    if not b:
        ac = "access denied"
    else:
        ac = "access allowed"
    if a != b:
        print "failed"
    else:
        print "ok (%s)" % ac
    print


def _test():
    global debug
    rp = RobotFileParser()
    debug = 1

    # robots.txt that exists, gotten to by redirection
    rp.set_url('http://www.musi-cal.com/robots.txt')
    rp.read()

    # test for re.escape
    _check(rp.can_fetch('*', 'http://www.musi-cal.com/'), 1)
    # this should match the first rule, which is a disallow
    _check(rp.can_fetch('', 'http://www.musi-cal.com/'), 0)
    # various cherry pickers
    _check(rp.can_fetch('CherryPickerSE',
           'http://www.musi-cal.com/cgi-bin/event-search?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.0',
           'http://www.musi-cal.com/cgi-bin/event-search?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.5',
           'http://www.musi-cal.com/cgi-bin/event-search?city=San+Francisco'), 0)
    # case sensitivity
    _check(rp.can_fetch('ExtractorPro', 'http://www.musi-cal.com/blubba'), 0)
    _check(rp.can_fetch('extractorpro', 'http://www.musi-cal.com/blubba'), 0)
    # substring test
    _check(rp.can_fetch('toolpak/1.1', 'http://www.musi-cal.com/blubba'), 0)
    # tests for catch-all * agent
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/search'), 0)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/Musician/me'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)

    # robots.txt that does not exist
    rp.set_url('http://www.lycos.com/robots.txt')
    rp.read()
    _check(rp.can_fetch('Mozilla', 'http://www.lycos.com/search'), 1)

if __name__ == '__main__':
    _test()
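
For reference, a minimal usage sketch of the module above. It feeds parse() a
hand-written robots.txt instead of fetching one over the network; the agent
names, rules, and URLs are illustrative placeholders, not taken from any real
site. It assumes the module imports as robotparser, as it does from the
library path shown above.

    # Self-contained sketch: exercise parse() and can_fetch() offline.
    import robotparser

    rp = robotparser.RobotFileParser()
    rp.parse([
        'User-agent: ExampleBot',   # hypothetical crawler name
        'Disallow: /private/',
        '',
        'User-agent: *',            # catch-all entry, checked after named ones
        'Allow: /',
    ])
    # The agent token is matched case-insensitively, ignoring any /version suffix.
    print rp.can_fetch('ExampleBot/1.0', 'http://example.com/private/page.html')  # False
    print rp.can_fetch('OtherBot', 'http://example.com/private/page.html')        # True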

