Detected Pickle imports (300)

The raw scan repeats the same block of symbols once per pickled file in the archive, which is why it reports 300 imports in total. Deduplicated, the list reduces to the following (the "__builtin__" and "copy_reg" entries come from Python 2-era pickles, the "builtins" entries from Python 3 ones):

- "nltk.tokenize.punkt.PunktSentenceTokenizer",
- "nltk.tokenize.punkt.PunktParameters",
- "nltk.tokenize.punkt.PunktLanguageVars",
- "nltk.tokenize.punkt.PunktToken",
- "collections.defaultdict",
- "copy_reg._reconstructor",
- "__builtin__.set",
- "__builtin__.int",
- "__builtin__.long",
- "__builtin__.object",
- "builtins.set",
- "builtins.int"
 
File size: 13.9 MB
SHA256: 51c3078994aeaf650bfc8e028be4fb42b4a0d177d41c012b6a983979653660ec
Pointer size: 133 Bytes
Size of remote file: 13.9 MB
Xet backed hash: b864d1477a17346b0f7f13709f6688fca2de4aff22928b510e73a918e3cb64ca
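The SHA256 above lets a downloader confirm the file arrived intact. A minimal check, assuming the file was saved as "punkt.zip" (a placeholder name):

```python
import hashlib

EXPECTED = "51c3078994aeaf650bfc8e028be4fb42b4a0d177d41c012b6a983979653660ec"

def sha256_of(path, chunk_size=1 << 20):
    """Hash the file in 1 MiB chunks so the 13.9 MB download
    is never held in memory all at once."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Placeholder filename; use whatever name the file was saved under.
assert sha256_of("punkt.zip") == EXPECTED, "checksum mismatch - re-download"
```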
 
Xet efficiently stores large files inside Git by splitting them into unique chunks, accelerating uploads and downloads.
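Xet's actual chunk format is not spelled out here; as a rough illustration of the content-defined chunking idea, the sketch below cuts a byte stream wherever a rolling checksum hits a fixed pattern. Boundaries then follow content rather than byte offsets, so unchanged regions of an edited file keep producing identical chunks that a chunk store deduplicates. All parameters and the filename are illustrative:

```python
import hashlib

def chunk_boundaries(data: bytes, mask: int = (1 << 13) - 1, min_size: int = 48):
    """Yield (start, end) chunk spans using a shift-and-add rolling checksum.
    With a 13-bit mask the expected chunk size is roughly 8 KiB past the
    minimum. An insertion near the start of a file only reshuffles nearby
    chunks; identical regions elsewhere still cut at the same places."""
    rolling, start = 0, 0
    for i, byte in enumerate(data):
        # Contributions of bytes more than ~32 positions back have been
        # shifted out of the 32-bit window.
        rolling = ((rolling << 1) + byte) & 0xFFFFFFFF
        if i - start >= min_size and (rolling & mask) == mask:
            yield start, i + 1
            rolling, start = 0, i + 1
    if start < len(data):
        yield start, len(data)

# Chunks dedupe by content hash: re-uploading a slightly edited copy of a
# file only adds the chunks whose hashes the store has not seen before.
store = {}
blob = open("punkt.zip", "rb").read()  # placeholder file
for s, e in chunk_boundaries(blob):
    store.setdefault(hashlib.sha256(blob[s:e]).hexdigest(), blob[s:e])
print(len(store), "unique chunks")
```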