idx: int64 (0-63k) | question: string (61-4.03k chars) | target: string (6-1.23k chars)
100
def set_font_size ( self , pt = None , px = None ) : self . font . set_size ( pt , px ) self . _render ( )
Set the font size to the desired size in pt or px .
101
def cursor ( self ) : if self . _cursor < 0 : self . cursor = 0 if self . _cursor > len ( self ) : self . cursor = len ( self ) return self . _cursor
The position of the cursor in the text .
102
def delete_one_letter ( self , letter = RIGHT ) : assert letter in ( self . RIGHT , self . LEFT ) if letter == self . LEFT : papy = self . cursor self . text = self . text [ : self . cursor - 1 ] + self . text [ self . cursor : ] self . cursor = papy - 1 else : self . text = self . text [ : self . cursor ] + self . text [ self . cursor + 1 : ]
Delete one letter to the right or to the left of the cursor.
103
def delete_one_word ( self , word = RIGHT ) : assert word in ( self . RIGHT , self . LEFT ) if word == self . RIGHT : papy = self . text . find ( ' ' , self . cursor ) + 1 if not papy : papy = len ( self . text ) self . text = self . text [ : self . cursor ] + self . text [ papy : ] else : papy = self . text . rfind ( ' ' , 0 , self . cursor ) if papy == - 1 : papy = 0 self . text = self . text [ : papy ] + self . text [ self . cursor : ] self . cursor = papy
Delete one word to the right or to the left of the cursor.
104
def add_letter ( self , letter ) : assert isinstance ( letter , str ) assert len ( letter ) == 1 self . text = self . text [ : self . cursor ] + letter + self . text [ self . cursor : ] self . cursor += 1
Add a letter at the cursor pos .
105
def update ( self , event_or_list ) : event_or_list = super ( ) . update ( event_or_list ) for e in event_or_list : if e . type == KEYDOWN : if e . key == K_RIGHT : if e . mod & KMOD_CTRL : self . move_cursor_one_word ( self . RIGHT ) else : self . move_cursor_one_letter ( self . RIGHT ) elif e . key == K_LEFT : if e . mod & KMOD_CTRL : self . move_cursor_one_word ( self . LEFT ) else : self . move_cursor_one_letter ( self . LEFT ) elif e . key == K_BACKSPACE : if self . cursor == 0 : continue if e . mod & KMOD_CTRL : self . delete_one_word ( self . LEFT ) else : self . delete_one_letter ( self . LEFT ) elif e . key == K_DELETE : if e . mod & KMOD_CTRL : self . delete_one_word ( self . RIGHT ) else : self . delete_one_letter ( self . RIGHT ) elif e . unicode != '' and e . unicode . isprintable ( ) : self . add_letter ( e . unicode )
Update the text and position of cursor according to the event passed .
106
def shawn_text ( self ) : if len ( self . _shawn_text ) == len ( self ) : return self . _shawn_text if self . style == self . DOTS : return chr ( 0x2022 ) * len ( self ) ranges = [ ( 902 , 1366 ) , ( 192 , 683 ) , ( 33 , 122 ) ] s = '' while len ( s ) < len ( self . text ) : apolo = randint ( 33 , 1366 ) for a , b in ranges : if a <= apolo <= b : s += chr ( apolo ) break self . _shawn_text = s return s
The text displayed instead of the real one .
107
def cursor_pos ( self ) : if len ( self ) == 0 : return self . left + self . default_text . get_width ( ) papy = self . _surface . get_width ( ) if papy > self . w : shift = papy - self . width else : shift = 0 return self . left + self . font . size ( self . shawn_text [ : self . cursor ] ) [ 0 ] - shift
The cursor position in pixels .
108
def latex_to_img ( tex ) : with tempfile . TemporaryDirectory ( ) as tmpdirname : with open ( tmpdirname + r'\tex.tex' , 'w' ) as f : f . write ( tex ) os . system ( r"latex {0}\tex.tex -halt-on-error -interaction=batchmode -disable-installer -aux-directory={0} " r"-output-directory={0}" . format ( tmpdirname ) ) os . system ( r"dvipng -T tight -z 9 --truecolor -o {0}\tex.png {0}\tex.dvi" . format ( tmpdirname ) ) image = pygame . image . load ( tmpdirname + r'\tex.png' ) return image
Return a pygame image from a latex template .
109
def name2rgb ( name ) : try : import colour except ImportError : raise ImportError ( 'You need colour to be installed: pip install colour' ) c = colour . Color ( name ) color = int ( c . red * 255 ) , int ( c . green * 255 ) , int ( c . blue * 255 ) return color
Convert the name of a color into its RGB value
110
def parse_page ( page ) : colors = get_config ( ) [ 'colors' ] with io . open ( page , encoding = 'utf-8' ) as f : lines = f . readlines ( ) output_lines = [ ] for line in lines [ 1 : ] : if is_headline ( line ) : continue elif is_description ( line ) : output_lines . append ( click . style ( line . replace ( '>' , ' ' ) , fg = colors [ 'description' ] ) ) elif is_old_usage ( line ) : output_lines . append ( click . style ( line , fg = colors [ 'usage' ] ) ) elif is_code_example ( line ) : line = ' ' + line if line . startswith ( '`' ) else line [ 2 : ] output_lines . append ( click . style ( line . replace ( '`' , '' ) , fg = colors [ 'command' ] ) ) elif is_line_break ( line ) : output_lines . append ( click . style ( line ) ) else : output_lines . append ( click . style ( '- ' + line , fg = colors [ 'usage' ] ) ) return output_lines
Parse the command man page .
111
def configure_logging ( level = logging . DEBUG ) : if level == logging . DEBUG : logging . basicConfig ( level = logging . DEBUG , format = '%(asctime)s - %(levelname)s - %(message)s' ) return logging logger = logging . getLogger ( __name__ ) logger . setLevel ( level ) formatter = logging . Formatter ( '%(asctime)s - %(levelname)s - %(message)s' ) ch = logging . StreamHandler ( ) ch . setLevel ( level ) ch . setFormatter ( formatter ) logger . addHandler ( ch ) return logger
Configure the module logging engine .
112
def parse_username_password_hostname ( remote_url ) : assert remote_url assert ':' in remote_url if '@' in remote_url : username , hostname = remote_url . rsplit ( '@' , 1 ) else : username , hostname = None , remote_url hostname , remote_path = hostname . split ( ':' , 1 ) password = None if username and ':' in username : username , password = username . split ( ':' , 1 ) assert hostname assert remote_path return username , password , hostname , remote_path
Parse a command line string and return username, password, remote hostname and remote path.
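A minimal usage sketch of the parser above; the remote URL is made up for illustration.
# user:password@host:path all present in one SSH-style string
username, password, hostname, remote_path = parse_username_password_hostname(
    "alice:secret@example.com:/var/data")
assert (username, password, hostname, remote_path) == (
    "alice", "secret", "example.com", "/var/data")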
113
def get_ssh_agent_keys ( logger ) : agent , agent_keys = None , None try : agent = paramiko . agent . Agent ( ) _agent_keys = agent . get_keys ( ) if not _agent_keys : agent . close ( ) logger . error ( "SSH agent didn't provide any valid key. Trying to continue..." ) else : agent_keys = tuple ( k for k in _agent_keys ) except paramiko . SSHException : if agent : agent . close ( ) agent = None logger . error ( "SSH agent speaks a non-compatible protocol. Ignoring it." ) finally : return agent , agent_keys
Ask the SSH agent for a list of keys and return it .
114
def create_parser ( ) : parser = argparse . ArgumentParser ( description = 'Sync a local and a remote folder through SFTP.' ) parser . add_argument ( "path" , type = str , metavar = "local-path" , help = "the path of the local folder" , ) parser . add_argument ( "remote" , type = str , metavar = "user[:password]@hostname:remote-path" , help = "the ssh-url ([user[:password]@]hostname:remote-path) of the remote folder. " "The hostname can be specified as a ssh_config's hostname too. " "Every missing information will be gathered from there" , ) parser . add_argument ( "-k" , "--key" , metavar = "identity-path" , action = "append" , help = "private key identity path (defaults to ~/.ssh/id_rsa)" ) parser . add_argument ( "-l" , "--logging" , choices = [ 'CRITICAL' , 'ERROR' , 'WARNING' , 'INFO' , 'DEBUG' , 'NOTSET' ] , default = 'ERROR' , help = "set logging level" ) parser . add_argument ( "-p" , "--port" , default = 22 , type = int , help = "SSH remote port (defaults to 22)" ) parser . add_argument ( "-f" , "--fix-symlinks" , action = "store_true" , help = "fix symbolic links on remote side" ) parser . add_argument ( "-a" , "--ssh-agent" , action = "store_true" , help = "enable ssh-agent support" ) parser . add_argument ( "-c" , "--ssh-config" , metavar = "ssh_config path" , default = "~/.ssh/config" , type = str , help = "path to the ssh-configuration file (default to ~/.ssh/config)" ) parser . add_argument ( "-n" , "--known-hosts" , metavar = "known_hosts path" , default = "~/.ssh/known_hosts" , type = str , help = "path to the openSSH known_hosts file" ) parser . add_argument ( "-d" , "--disable-known-hosts" , action = "store_true" , help = "disable known_hosts fingerprint checking (security warning!)" ) parser . add_argument ( "-e" , "--exclude-from" , metavar = "exclude-from-file-path" , type = str , help = "exclude files matching pattern in exclude-from-file-path" ) parser . add_argument ( "-t" , "--do-not-delete" , action = "store_true" , help = "do not delete remote files missing from local folder" ) parser . add_argument ( "-o" , "--allow-unknown" , action = "store_true" , help = "allow connection to unknown hosts" ) parser . add_argument ( "-r" , "--create-remote-directory" , action = "store_true" , help = "Create remote base directory if missing on remote" ) return parser
Create the CLI argument parser .
115
def main ( args = None ) : parser = create_parser ( ) args = vars ( parser . parse_args ( args ) ) log_mapping = { 'CRITICAL' : logging . CRITICAL , 'ERROR' : logging . ERROR , 'WARNING' : logging . WARNING , 'INFO' : logging . INFO , 'DEBUG' : logging . DEBUG , 'NOTSET' : logging . NOTSET , } log_level = log_mapping [ args [ 'logging' ] ] del ( args [ 'logging' ] ) global logger logger = configure_logging ( log_level ) args_mapping = { "path" : "local_path" , "remote" : "remote_url" , "ssh_config" : "ssh_config_path" , "exclude_from" : "exclude_file" , "known_hosts" : "known_hosts_path" , "do_not_delete" : "delete" , "key" : "identity_files" , } kwargs = { args_mapping [ k ] : v for k , v in args . items ( ) if v and k in args_mapping } kwargs . update ( { k : v for k , v in args . items ( ) if v and k not in args_mapping } ) if args [ 'disable_known_hosts' ] : kwargs [ 'known_hosts_path' ] = None del ( kwargs [ 'disable_known_hosts' ] ) if "delete" in kwargs : kwargs [ "delete" ] = not kwargs [ "delete" ] kwargs [ "identity_files" ] = kwargs . get ( "identity_files" , None ) or [ "~/.ssh/id_rsa" ] sync = SFTPClone ( ** kwargs ) sync . run ( )
The main entry point.
116
def _must_be_deleted ( local_path , r_st ) : if not os . path . lexists ( local_path ) : return True l_st = os . lstat ( local_path ) if S_IFMT ( r_st . st_mode ) != S_IFMT ( l_st . st_mode ) : return True return False
Return True if the remote correspondent of local_path has to be deleted .
117
def file_upload ( self , local_path , remote_path , l_st ) : self . sftp . put ( local_path , remote_path ) self . _match_modes ( remote_path , l_st )
Upload local_path to remote_path and set permission and mtime .
118
def remote_delete ( self , remote_path , r_st ) : if S_ISDIR ( r_st . st_mode ) : for item in self . sftp . listdir_attr ( remote_path ) : full_path = path_join ( remote_path , item . filename ) self . remote_delete ( full_path , item ) self . sftp . rmdir ( remote_path ) else : try : self . sftp . remove ( remote_path ) except FileNotFoundError as e : self . logger . error ( "error while removing {}. trace: {}" . format ( remote_path , e ) )
Remove the remote directory node .
119
def check_for_deletion ( self , relative_path = None ) : if not relative_path : relative_path = str ( ) remote_path = path_join ( self . remote_path , relative_path ) local_path = path_join ( self . local_path , relative_path ) for remote_st in self . sftp . listdir_attr ( remote_path ) : r_lstat = self . sftp . lstat ( path_join ( remote_path , remote_st . filename ) ) inner_remote_path = path_join ( remote_path , remote_st . filename ) inner_local_path = path_join ( local_path , remote_st . filename ) if S_ISLNK ( r_lstat . st_mode ) : if self . _must_be_deleted ( inner_local_path , r_lstat ) : self . remote_delete ( inner_remote_path , r_lstat ) continue if self . _must_be_deleted ( inner_local_path , remote_st ) : self . remote_delete ( inner_remote_path , remote_st ) elif S_ISDIR ( remote_st . st_mode ) : self . check_for_deletion ( path_join ( relative_path , remote_st . filename ) )
Traverse the entire remote_path tree .
120
def create_update_symlink ( self , link_destination , remote_path ) : try : self . sftp . remove ( remote_path ) except IOError : pass finally : try : self . sftp . symlink ( link_destination , remote_path ) except OSError as e : self . logger . error ( "error while symlinking {} to {}: {}" . format ( remote_path , link_destination , e ) )
Create a new link pointing to link_destination in remote_path position .
121
def run ( self ) : try : self . sftp . stat ( self . remote_path ) except FileNotFoundError as e : if self . create_remote_directory : self . sftp . mkdir ( self . remote_path ) self . logger . info ( "Created missing remote dir: '" + self . remote_path + "'" ) else : self . logger . error ( "Remote folder does not exists. " "Add '-r' to create it if missing." ) sys . exit ( 1 ) try : if self . delete : self . check_for_deletion ( ) self . check_for_upload_create ( ) except FileNotFoundError : self . logger . error ( "Error while opening remote folder. Are you sure it does exist?" ) sys . exit ( 1 )
Run the sync .
122
def list_files ( start_path ) : s = u'\n' for root , dirs , files in os . walk ( start_path ) : level = root . replace ( start_path , '' ) . count ( os . sep ) indent = ' ' * 4 * level s += u'{}{}/\n' . format ( indent , os . path . basename ( root ) ) sub_indent = ' ' * 4 * ( level + 1 ) for f in files : s += u'{}{}\n' . format ( sub_indent , f ) return s
Replacement for the Unix tree command.
123
def file_tree ( start_path ) : nested_dirs = { } root_dir = start_path . rstrip ( os . sep ) start = root_dir . rfind ( os . sep ) + 1 for path , dirs , files in os . walk ( root_dir ) : folders = path [ start : ] . split ( os . sep ) subdir = dict . fromkeys ( files ) parent = reduce ( dict . get , folders [ : - 1 ] , nested_dirs ) parent [ folders [ - 1 ] ] = subdir return nested_dirs
Create a nested dictionary that represents the folder structure of start_path .
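A small usage sketch, assuming the file_tree function above is importable (on Python 3 its reduce call needs from functools import reduce); the directory layout is hypothetical.
# For a layout of project/README.md and project/src/main.py,
# file_tree('project') builds a nested dict with files mapped to None:
# {'project': {'README.md': None, 'src': {'main.py': None}}}
tree = file_tree('project')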
124
def capture_sys_output ( ) : capture_out , capture_err = StringIO ( ) , StringIO ( ) current_out , current_err = sys . stdout , sys . stderr try : sys . stdout , sys . stderr = capture_out , capture_err yield capture_out , capture_err finally : sys . stdout , sys . stderr = current_out , current_err
Capture standard output and error .
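The function above is a generator, so it presumably carries a contextlib.contextmanager decorator in the original source; a usage sketch under that assumption.
with capture_sys_output() as (out, err):
    print("captured")
assert out.getvalue() == "captured\n"
assert err.getvalue() == ""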
125
def suppress_logging ( log_level = logging . CRITICAL ) : logging . disable ( log_level ) yield logging . disable ( logging . NOTSET )
Suppress logging .
126
def override_env_variables ( ) : env_vars = ( "LOGNAME" , "USER" , "LNAME" , "USERNAME" ) old = [ os . environ [ v ] if v in os . environ else None for v in env_vars ] for v in env_vars : os . environ [ v ] = "test" yield for i , v in enumerate ( env_vars ) : if old [ i ] : os . environ [ v ] = old [ i ]
Override user environment variables with a custom value.
127
def get_config ( ) : config_path = path . join ( ( os . environ . get ( 'TLDR_CONFIG_DIR' ) or path . expanduser ( '~' ) ) , '.tldrrc' ) if not path . exists ( config_path ) : sys . exit ( "Can't find config file at: {0}. You may use `tldr init` " "to init the config file." . format ( config_path ) ) with io . open ( config_path , encoding = 'utf-8' ) as f : try : config = yaml . safe_load ( f ) except yaml . scanner . ScannerError : sys . exit ( "The config file is not a valid YAML file." ) supported_colors = [ 'black' , 'red' , 'green' , 'yellow' , 'blue' , 'magenta' , 'cyan' , 'white' ] if not set ( config [ 'colors' ] . values ( ) ) . issubset ( set ( supported_colors ) ) : sys . exit ( "Unsupported colors in config file: {0}." . format ( ', ' . join ( set ( config [ 'colors' ] . values ( ) ) - set ( supported_colors ) ) ) ) if not path . exists ( config [ 'repo_directory' ] ) : sys . exit ( "Can't find the tldr repo, check the `repo_directory` " "setting in config file." ) return config
Get the configuration from .tldrrc and return it as a dict.
128
def parse_man_page ( command , platform ) : page_path = find_page_location ( command , platform ) output_lines = parse_page ( page_path ) return output_lines
Parse the man page and return the parsed lines .
129
def find_page_location ( command , specified_platform ) : repo_directory = get_config ( ) [ 'repo_directory' ] default_platform = get_config ( ) [ 'platform' ] command_platform = ( specified_platform if specified_platform else default_platform ) with io . open ( path . join ( repo_directory , 'pages/index.json' ) , encoding = 'utf-8' ) as f : index = json . load ( f ) command_list = [ item [ 'name' ] for item in index [ 'commands' ] ] if command not in command_list : sys . exit ( ( "Sorry, we don't support command: {0} right now.\n" "You can file an issue or send a PR on github:\n" " https://github.com/tldr-pages/tldr" ) . format ( command ) ) supported_platforms = index [ 'commands' ] [ command_list . index ( command ) ] [ 'platform' ] if command_platform in supported_platforms : platform = command_platform elif 'common' in supported_platforms : platform = 'common' else : platform = '' if not platform : sys . exit ( ( "Sorry, command {0} is not supported on your platform.\n" "You can file an issue or send a PR on github:\n" " https://github.com/tldr-pages/tldr" ) . format ( command ) ) page_path = path . join ( path . join ( repo_directory , 'pages' ) , path . join ( platform , command + '.md' ) ) return page_path
Find the command man page in the pages directory .
130
def find ( command , on ) : output_lines = parse_man_page ( command , on ) click . echo ( '' . join ( output_lines ) )
Find the command usage .
131
def update ( ) : repo_directory = get_config ( ) [ 'repo_directory' ] os . chdir ( repo_directory ) click . echo ( "Check for updates..." ) local = subprocess . check_output ( 'git rev-parse master' . split ( ) ) . strip ( ) remote = subprocess . check_output ( 'git ls-remote https://github.com/tldr-pages/tldr/ HEAD' . split ( ) ) . split ( ) [ 0 ] if local != remote : click . echo ( "Updating..." ) subprocess . check_call ( 'git checkout master' . split ( ) ) subprocess . check_call ( 'git pull --rebase' . split ( ) ) build_index ( ) click . echo ( "Update to the latest and rebuild the index." ) else : click . echo ( "No need for updates." )
Update to the latest pages .
132
def init ( ) : default_config_path = path . join ( ( os . environ . get ( 'TLDR_CONFIG_DIR' ) or path . expanduser ( '~' ) ) , '.tldrrc' ) if path . exists ( default_config_path ) : click . echo ( "There is already a config file exists, " "skip initializing it." ) else : repo_path = click . prompt ( "Input the tldr repo path(absolute path)" ) if not path . exists ( repo_path ) : sys . exit ( "Repo path not exist, clone it first." ) platform = click . prompt ( "Input your platform(linux, osx or sunos)" ) if platform not in [ 'linux' , 'osx' , 'sunos' ] : sys . exit ( "Platform should be in linux, osx or sunos." ) colors = { "description" : "blue" , "usage" : "green" , "command" : "cyan" } config = { "repo_directory" : repo_path , "colors" : colors , "platform" : platform } with open ( default_config_path , 'w' ) as f : f . write ( yaml . safe_dump ( config , default_flow_style = False ) ) click . echo ( "Initializing the config file at {0}" . format ( default_config_path ) )
Init config file .
133
def locate ( command , on ) : location = find_page_location ( command , on ) click . echo ( location )
Locate the command's man page.
134
def map_to ( self , attrname , tablename = None , selectable = None , schema = None , base = None , mapper_args = util . immutabledict ( ) ) : if attrname in self . _cache : raise SQLSoupError ( "Attribute '%s' is already mapped to '%s'" % ( attrname , class_mapper ( self . _cache [ attrname ] ) . mapped_table ) ) if tablename is not None : if not isinstance ( tablename , basestring ) : raise ArgumentError ( "'tablename' argument must be a string." ) if selectable is not None : raise ArgumentError ( "'tablename' and 'selectable' " "arguments are mutually exclusive" ) selectable = Table ( tablename , self . _metadata , autoload = True , autoload_with = self . bind , schema = schema or self . schema ) elif schema : raise ArgumentError ( "'tablename' argument is required when " "using 'schema'." ) elif selectable is not None : if not isinstance ( selectable , expression . FromClause ) : raise ArgumentError ( "'selectable' argument must be a " "table, select, join, or other " "selectable construct." ) else : raise ArgumentError ( "'tablename' or 'selectable' argument is " "required." ) if not selectable . primary_key . columns and not 'primary_key' in mapper_args : if tablename : raise SQLSoupError ( "table '%s' does not have a primary " "key defined" % tablename ) else : raise SQLSoupError ( "selectable '%s' does not have a primary " "key defined" % selectable ) mapped_cls = _class_for_table ( self . session , self . engine , selectable , base or self . base , mapper_args ) self . _cache [ attrname ] = mapped_cls return mapped_cls
Configure a mapping to the given attrname .
135
def map ( self , selectable , base = None , ** mapper_args ) : return _class_for_table ( self . session , self . engine , selectable , base or self . base , mapper_args )
Map a selectable directly .
136
def with_labels ( self , selectable , base = None , ** mapper_args ) : return self . map ( expression . _clause_element_as_expr ( selectable ) . select ( use_labels = True ) . alias ( 'foo' ) , base = base , ** mapper_args )
Map a selectable directly, wrapping the selectable in a subquery with labels.
137
def left ( self , f , n = 1 ) : intervals = self . intervals [ f . chrom ] if intervals == [ ] : return [ ] iright = binsearch_left_start ( intervals , f . start , 0 , len ( intervals ) ) + 1 ileft = binsearch_left_start ( intervals , f . start - self . max_len [ f . chrom ] - 1 , 0 , 0 ) results = sorted ( ( distance ( other , f ) , other ) for other in intervals [ ileft : iright ] if other . end < f . start and distance ( f , other ) != 0 ) if len ( results ) == n : return [ r [ 1 ] for r in results ] for i in range ( n , len ( results ) ) : if results [ i - 1 ] [ 0 ] != results [ i ] [ 0 ] : return [ r [ 1 ] for r in results [ : i ] ] if ileft == 0 : return [ r [ 1 ] for r in results ] 1 / 0
return the nearest n features strictly to the left of a Feature f . Overlapping features are not considered as to the left .
138
def right ( self , f , n = 1 ) : intervals = self . intervals [ f . chrom ] ilen = len ( intervals ) iright = binsearch_right_end ( intervals , f . end , 0 , ilen ) results = [ ] while iright < ilen : i = len ( results ) if i > n : if distance ( f , results [ i - 1 ] ) != distance ( f , results [ i - 2 ] ) : return results [ : i - 1 ] other = intervals [ iright ] iright += 1 if distance ( other , f ) == 0 : continue results . append ( other ) return results
return the nearest n features strictly to the right of a Feature f . Overlapping features are not considered as to the right .
139
def upstream ( self , f , n = 1 ) : if f . strand == - 1 : return self . right ( f , n ) return self . left ( f , n )
find n upstream features where upstream is determined by the strand of the query Feature f Overlapping features are not considered .
140
def downstream ( self , f , n = 1 ) : if f . strand == - 1 : return self . left ( f , n ) return self . right ( f , n )
find n downstream features where downstream is determined by the strand of the query Feature f Overlapping features are not considered .
141
def sequence ( db , chrom , start , end ) : url = "http://genome.ucsc.edu/cgi-bin/das/%s" % db url += "/dna?segment=%s:%i,%i" xml = U . urlopen ( url % ( chrom , start , end ) ) . read ( ) return _seq_from_xml ( xml )
Return the sequence for a region using the UCSC DAS server. Note the start is 1-based; each feature will have its own .sequence method which sends the correct start and end to this function.
142
def set_table ( genome , table , table_name , connection_string , metadata ) : table = Table ( table_name , genome . _metadata , autoload = True , autoload_with = genome . bind , extend_existing = True ) for i , idx in enumerate ( table . indexes ) : idx . name = table_name + "." + idx . name + "_ix" + str ( i ) cols = [ ] for i , col in enumerate ( table . columns ) : if isinstance ( col . type , ( LONGBLOB , ENUM ) ) : if 'sqlite' in connection_string : col . type = VARCHAR ( ) elif 'postgres' in connection_string : if isinstance ( col . type , ENUM ) : col . type = PG_ENUM ( * col . type . enums , name = col . name , create_type = True ) else : col . type = VARCHAR ( ) elif str ( col . type ) == "VARCHAR" and ( "mysql" in connection_string or "postgres" in connection_string ) : if col . type . length is None : col . type . length = 48 if col . name != "description" else None if not "mysql" in connection_string : if str ( col . type ) . lower ( ) . startswith ( "set(" ) : col . type = VARCHAR ( 15 ) cols . append ( col ) table = Table ( table_name , genome . _metadata , * cols , autoload_replace = True , extend_existing = True ) return table
alter the table to work between different dialects
143
def mirror ( self , tables , dest_url ) : from mirror import mirror return mirror ( self , tables , dest_url )
Mirror a set of tables to dest_url.
144
def dataframe ( self , table ) : from pandas import DataFrame if isinstance ( table , six . string_types ) : table = getattr ( self , table ) try : rec = table . first ( ) except AttributeError : rec = table [ 0 ] if hasattr ( table , "all" ) : records = table . all ( ) else : records = [ tuple ( t ) for t in table ] cols = [ c . name for c in rec . _table . columns ] return DataFrame . from_records ( records , columns = cols )
create a pandas dataframe from a table or query
145
def david_go ( refseq_list , annot = ( 'SP_PIR_KEYWORDS' , 'GOTERM_BP_FAT' , 'GOTERM_CC_FAT' , 'GOTERM_MF_FAT' ) ) : URL = "http://david.abcc.ncifcrf.gov/api.jsp?type=REFSEQ_MRNA&ids=%s&tool=term2term&annot=" import webbrowser webbrowser . open ( URL % "," . join ( set ( refseq_list ) ) + "," . join ( annot ) )
open a web - browser to the DAVID online enrichment tool
146
def bin_query ( self , table , chrom , start , end ) : if isinstance ( table , six . string_types ) : table = getattr ( self , table ) try : tbl = table . _table except AttributeError : tbl = table . column_descriptions [ 0 ] [ 'type' ] . _table q = table . filter ( tbl . c . chrom == chrom ) if hasattr ( tbl . c , "bin" ) : bins = Genome . bins ( start , end ) if len ( bins ) < 100 : q = q . filter ( tbl . c . bin . in_ ( bins ) ) if hasattr ( tbl . c , "txStart" ) : return q . filter ( tbl . c . txStart <= end ) . filter ( tbl . c . txEnd >= start ) return q . filter ( tbl . c . chromStart <= end ) . filter ( tbl . c . chromEnd >= start )
perform an efficient spatial query using the bin column if available . The possible bins are calculated from the start and end sent to this function .
147
def upstream ( self , table , chrom_or_feat , start = None , end = None , k = 1 ) : res = self . knearest ( table , chrom_or_feat , start , end , k , "up" ) end = getattr ( chrom_or_feat , "end" , end ) start = getattr ( chrom_or_feat , "start" , start ) rev = getattr ( chrom_or_feat , "strand" , "+" ) == "-" if rev : return [ x for x in res if x . end > start ] else : return [ x for x in res if x . start < end ]
Return k - nearest upstream features
148
def knearest ( self , table , chrom_or_feat , start = None , end = None , k = 1 , _direction = None ) : assert _direction in ( None , "up" , "down" ) if start is None : assert end is None chrom , start , end = chrom_or_feat . chrom , chrom_or_feat . start , chrom_or_feat . end if _direction in ( "up" , "down" ) and getattr ( chrom_or_feat , "strand" , None ) == "-" : _direction = "up" if _direction == "down" else "up" else : chrom = chrom_or_feat qstart , qend = long ( start ) , long ( end ) res = self . bin_query ( table , chrom , qstart , qend ) i , change = 1 , 350 try : while res . count ( ) < k : if _direction in ( None , "up" ) : if qstart == 0 and _direction == "up" : break qstart = max ( 0 , qstart - change ) if _direction in ( None , "down" ) : qend += change i += 1 change *= ( i + 5 ) res = self . bin_query ( table , chrom , qstart , qend ) except BigException : return [ ] def dist ( f ) : d = 0 if start > f . end : d = start - f . end elif f . start > end : d = f . start - end return d dists = sorted ( [ ( dist ( f ) , f ) for f in res ] ) if len ( dists ) == 0 : return [ ] dists , res = zip ( * dists ) if len ( res ) == k : return res if k > len ( res ) : if k == 0 : return [ ] k = len ( res ) ndist = dists [ k - 1 ] while k < len ( res ) and dists [ k ] == ndist : k = k + 1 return res [ : k ]
Return k - nearest features
149
def annotate ( self , fname , tables , feature_strand = False , in_memory = False , header = None , out = sys . stdout , parallel = False ) : from . annotate import annotate return annotate ( self , fname , tables , feature_strand , in_memory , header = header , out = out , parallel = parallel )
annotate a file with a number of tables
150
def bins ( start , end ) : if end - start < 536870912 : offsets = [ 585 , 73 , 9 , 1 ] else : raise BigException offsets = [ 4681 , 585 , 73 , 9 , 1 ] binFirstShift = 17 binNextShift = 3 start = start >> binFirstShift end = ( end - 1 ) >> binFirstShift bins = [ 1 ] for offset in offsets : bins . extend ( range ( offset + start , offset + end + 1 ) ) start >>= binNextShift end >>= binNextShift return frozenset ( bins )
Get all the bin numbers for a particular interval defined by (start, end].
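A short worked example of the binning arithmetic above (the coordinates are illustrative; elsewhere in this code the function is called as Genome.bins).
# A small interval touches one bin at each offset level: start>>17 == 0 and
# (end-1)>>17 == 0, so the candidate bins are 1, 585+0, 73+0 and 9+0.
assert Genome.bins(0, 1000) == frozenset([1, 9, 73, 585])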
151
def _find_filepath_in_roots ( filename ) : for root in settings . DJANGO_STATIC_MEDIA_ROOTS : filepath = _filename2filepath ( filename , root ) if os . path . isfile ( filepath ) : return filepath , root if settings . DEBUG : try : from django . contrib . staticfiles import finders absolute_path = finders . find ( filename ) if absolute_path : root , filepath = os . path . split ( absolute_path ) return absolute_path , root except ImportError : pass return None , None
Look for filename in all MEDIA_ROOTS and return the first one found .
152
def default_combine_filenames_generator ( filenames , max_length = 40 ) : path = None names = [ ] extension = None timestamps = [ ] for filename in filenames : name = os . path . basename ( filename ) if not extension : extension = os . path . splitext ( name ) [ 1 ] elif os . path . splitext ( name ) [ 1 ] != extension : raise ValueError ( "Can't combine multiple file extensions" ) for each in re . finditer ( '\.\d{10}\.' , name ) : timestamps . append ( int ( each . group ( ) . replace ( '.' , '' ) ) ) name = name . replace ( each . group ( ) , '.' ) name = os . path . splitext ( name ) [ 0 ] names . append ( name ) if path is None : path = os . path . dirname ( filename ) else : if len ( os . path . dirname ( filename ) ) < len ( path ) : path = os . path . dirname ( filename ) new_filename = '_' . join ( names ) if timestamps : new_filename += ".%s" % max ( timestamps ) new_filename = new_filename [ : max_length ] new_filename += extension return os . path . join ( path , new_filename )
Return a new filename to use as the combined file name for a bunch of files . A precondition is that they all have the same file extension
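A worked sketch of the combining rule above, using hypothetical file names.
combined = default_combine_filenames_generator([
    '/static/css/base.1400000000.css',
    '/static/css/forms.1400000001.css',
])
# The base names are joined and the newest embedded timestamp is kept:
# '/static/css/base_forms.1400000001.css'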
153
def overlaps ( self , other ) : if self . chrom != other . chrom : return False if self . start >= other . end : return False if other . start >= self . end : return False return True
check for overlap with the other interval
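A hedged illustration of the same overlap test using a throwaway stand-in for the real feature class.
from collections import namedtuple
Iv = namedtuple('Iv', 'chrom start end')  # stand-in, not the real feature class

def _overlaps(a, b):
    # mirrors the comparisons in the method above
    return a.chrom == b.chrom and a.start < b.end and b.start < a.end

assert _overlaps(Iv('chr1', 10, 20), Iv('chr1', 15, 25))      # overlapping
assert not _overlaps(Iv('chr1', 10, 20), Iv('chr1', 20, 30))  # merely adjacent
assert not _overlaps(Iv('chr2', 10, 20), Iv('chr1', 15, 25))  # different chromosomes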
154
def is_upstream_of ( self , other ) : if self . chrom != other . chrom : return None if getattr ( other , "strand" , None ) == "+" : return self . end <= other . start return self . start >= other . end
check if this is upstream of the other interval taking the strand of the other interval into account
155
def gene_features ( self ) : nm , strand = self . gene_name , self . strand feats = [ ( self . chrom , self . start , self . end , nm , strand , 'gene' ) ] for feat in ( 'introns' , 'exons' , 'utr5' , 'utr3' , 'cdss' ) : fname = feat [ : - 1 ] if feat [ - 1 ] == 's' else feat res = getattr ( self , feat ) if res is None or all ( r is None for r in res ) : continue if not isinstance ( res , list ) : res = [ res ] feats . extend ( ( self . chrom , s , e , nm , strand , fname ) for s , e in res ) tss = self . tss ( down = 1 ) if tss is not None : feats . append ( ( self . chrom , tss [ 0 ] , tss [ 1 ] , nm , strand , 'tss' ) ) prom = self . promoter ( ) feats . append ( ( self . chrom , prom [ 0 ] , prom [ 1 ] , nm , strand , 'promoter' ) ) return sorted ( feats , key = itemgetter ( 1 ) )
Return a list of features for the gene features of this object. This would include exons, introns, UTRs, etc.
156
def tss ( self , up = 0 , down = 0 ) : if not self . is_gene_pred : return None tss = self . txEnd if self . strand == '-' else self . txStart start , end = tss , tss if self . strand == '+' : start -= up end += down else : start += up end -= down start , end = end , start return max ( 0 , start ) , max ( end , start , 0 )
Return a (start, end) tuple of positions around the transcription-start site.
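A worked illustration of the coordinate arithmetic above, with made-up values.
# + strand, txStart=1000: tss(up=500, down=100) -> (1000-500, 1000+100) == (500, 1100)
# - strand, txEnd=5000:   tss(up=500, down=100) -> swapped to (5000-100, 5000+500) == (4900, 5500)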
157
def promoter ( self , up = 2000 , down = 0 ) : if not self . is_gene_pred : return None return self . tss ( up = up , down = down )
Return a (start, end) tuple of positions for the promoter region of this gene.
158
def cds ( self ) : ces = self . coding_exons if len ( ces ) < 1 : return ces ces [ 0 ] = ( self . cdsStart , ces [ 0 ] [ 1 ] ) ces [ - 1 ] = ( ces [ - 1 ] [ 0 ] , self . cdsEnd ) assert all ( ( s < e for s , e in ces ) ) return ces
just the parts of the exons that are translated
159
def is_downstream_of ( self , other ) : if self . chrom != other . chrom : return None if getattr ( other , "strand" , None ) == "-" : return self . end <= other . start return self . start >= other . end
return a boolean indicating whether this feature is downstream of other taking the strand of other into account
160
def utr5 ( self ) : if not self . is_coding or len ( self . exons ) < 2 : return ( None , None ) if self . strand == "+" : s , e = ( self . txStart , self . cdsStart ) else : s , e = ( self . cdsEnd , self . txEnd ) if s == e : return ( None , None ) return s , e
Return the 5' UTR if appropriate.
161
def sequence ( self , per_exon = False ) : db = self . db if not per_exon : start = self . txStart + 1 return _sequence ( db , self . chrom , start , self . txEnd ) else : seqs = [ ] for start , end in self . exons : seqs . append ( _sequence ( db , self . chrom , start + 1 , end ) ) return seqs
Return the sequence for this feature. If per_exon is True, return an array of exon sequences. This sequence is never reverse complemented.
162
def ncbi_blast ( self , db = "nr" , megablast = True , sequence = None ) : import requests requests . defaults . max_retries = 4 assert sequence in ( None , "cds" , "mrna" ) seq = self . sequence ( ) if sequence is None else ( "" . join ( self . cds_sequence if sequence == "cds" else self . mrna_sequence ) ) r = requests . post ( 'http://blast.ncbi.nlm.nih.gov/Blast.cgi' , timeout = 20 , data = dict ( PROGRAM = "blastn" , DESCRIPTIONS = 100 , ALIGNMENTS = 0 , FILTER = "L" , CMD = "Put" , MEGABLAST = True , DATABASE = db , QUERY = ">%s\n%s" % ( self . name , seq ) ) ) if not ( "RID =" in r . text and "RTOE" in r . text ) : print ( "no results" , file = sys . stderr ) raise StopIteration rid = r . text . split ( "RID = " ) [ 1 ] . split ( "\n" ) [ 0 ] import time time . sleep ( 4 ) print ( "checking..." , file = sys . stderr ) r = requests . post ( 'http://blast.ncbi.nlm.nih.gov/Blast.cgi' , data = dict ( RID = rid , format = "Text" , DESCRIPTIONS = 100 , DATABASE = db , CMD = "Get" , ) ) while "Status=WAITING" in r . text : print ( "checking..." , file = sys . stderr ) time . sleep ( 10 ) r = requests . post ( 'http://blast.ncbi.nlm.nih.gov/Blast.cgi' , data = dict ( RID = rid , format = "Text" , CMD = "Get" , ) ) for rec in _ncbi_parse ( r . text ) : yield rec
perform an NCBI blast against the sequence of this feature
163
def blat ( self , db = None , sequence = None , seq_type = "DNA" ) : from . blat_blast import blat , blat_all assert sequence in ( None , "cds" , "mrna" ) seq = self . sequence ( ) if sequence is None else ( "" . join ( self . cds_sequence if sequence == "cds" else self . mrna_sequence ) ) if isinstance ( db , ( tuple , list ) ) : return blat_all ( seq , self . gene_name , db , seq_type ) else : return blat ( seq , self . gene_name , db or self . db , seq_type )
Make a request to the genome browser's BLAT interface. sequence is one of None, 'cds' or 'mrna'. Returns a list of features that are hits to this sequence.
164
def bed ( self , * attrs , ** kwargs ) : exclude = ( "chrom" , "start" , "end" , "txStart" , "txEnd" , "chromStart" , "chromEnd" ) if self . is_gene_pred : return self . bed12 ( ** kwargs ) return "\t" . join ( map ( str , ( [ self . chrom , self . start , self . end ] + [ getattr ( self , attr ) for attr in attrs if not attr in exclude ] ) ) )
return a bed formatted string of this feature
165
def dereference_url ( url ) : res = open_url ( url , method = 'HEAD' ) res . close ( ) return res . url
Makes a HEAD request to find the final destination of a URL after following any redirects
166
def read ( url , ** kwargs ) : response = open_url ( url , ** kwargs ) try : return response . read ( ) finally : response . close ( )
Read the contents of a URL into memory and return them.
167
def check_extracted_paths ( namelist , subdir = None ) : def relpath ( p ) : q = os . path . relpath ( p ) if p . endswith ( os . path . sep ) or p . endswith ( '/' ) : q += os . path . sep return q parent = os . path . abspath ( '.' ) if subdir : if os . path . isabs ( subdir ) : raise FileException ( 'subdir must be a relative path' , subdir ) subdir = relpath ( subdir + os . path . sep ) for name in namelist : if os . path . commonprefix ( [ parent , os . path . abspath ( name ) ] ) != parent : raise FileException ( 'Insecure path in zipfile' , name ) if subdir and os . path . commonprefix ( [ subdir , relpath ( name ) ] ) != subdir : raise FileException ( 'Path in zipfile is not in required subdir' , name )
Check whether zip file paths are all relative and optionally in a specified subdirectory; raises an exception if not.
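A small usage sketch, assuming check_extracted_paths and FileException above are importable; the member names are illustrative.
# Names that stay inside the working directory and the requested subdir pass...
check_extracted_paths(['data/readme.txt', 'data/img/logo.png'], subdir='data')
# ...while traversal outside the working directory raises FileException.
try:
    check_extracted_paths(['../etc/passwd'])
except FileException:
    pass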
168
def get_as_local_path ( path , overwrite , progress = 0 , httpuser = None , httppassword = None ) : m = re . match ( '([A-Za-z]+)://' , path ) if m : log . debug ( 'Detected URL protocol: %s' , m . group ( 1 ) ) localpath = path . split ( '/' ) [ - 1 ] if not localpath : raise FileException ( 'Remote path appears to be a directory' , path ) if os . path . exists ( localpath ) : if overwrite == 'error' : raise FileException ( 'File already exists' , localpath ) elif overwrite == 'keep' : log . info ( 'Keeping existing %s' , localpath ) elif overwrite == 'backup' : rename_backup ( localpath ) download ( path , localpath , progress , httpuser = httpuser , httppassword = httppassword ) else : raise Exception ( 'Invalid overwrite flag: %s' % overwrite ) else : download ( path , localpath , progress , httpuser = httpuser , httppassword = httppassword ) else : localpath = path log . debug ( "Local path: %s" , localpath ) if os . path . isdir ( localpath ) : return 'directory' , localpath if os . path . exists ( localpath ) : return 'file' , localpath raise Exception ( 'Local path does not exist: %s' % localpath )
Automatically handle local and remote URLs files and directories
169
def create ( fs , channels , application ) : result_code = ctypes . c_int ( ) result = _create ( fs , channels , application , ctypes . byref ( result_code ) ) if result_code . value is not constants . OK : raise OpusError ( result_code . value ) return result
Allocates and initializes an encoder state .
170
def encode ( encoder , pcm , frame_size , max_data_bytes ) : pcm = ctypes . cast ( pcm , c_int16_pointer ) data = ( ctypes . c_char * max_data_bytes ) ( ) result = _encode ( encoder , pcm , frame_size , data , max_data_bytes ) if result < 0 : raise OpusError ( result ) return array . array ( 'c' , data [ : result ] ) . tostring ( )
Encodes an Opus frame
171
def encode_float ( encoder , pcm , frame_size , max_data_bytes ) : pcm = ctypes . cast ( pcm , c_float_pointer ) data = ( ctypes . c_char * max_data_bytes ) ( ) result = _encode_float ( encoder , pcm , frame_size , data , max_data_bytes ) if result < 0 : raise OpusError ( result ) return array . array ( 'c' , data [ : result ] ) . tostring ( )
Encodes an Opus frame from floating point input
172
def __parse_tostr ( self , text , ** kwargs ) : n = self . options . get ( 'nbest' , 1 ) if self . _KW_BOUNDARY in kwargs : patt = kwargs . get ( self . _KW_BOUNDARY , '.' ) tokens = list ( self . __split_pattern ( text , patt ) ) text = '' . join ( [ t [ 0 ] for t in tokens ] ) btext = self . __str2bytes ( text ) self . __mecab . mecab_lattice_set_sentence ( self . lattice , btext ) bpos = 0 self . __mecab . mecab_lattice_set_boundary_constraint ( self . lattice , bpos , self . MECAB_TOKEN_BOUNDARY ) for ( token , match ) in tokens : bpos += 1 if match : mark = self . MECAB_INSIDE_TOKEN else : mark = self . MECAB_ANY_BOUNDARY for _ in range ( 1 , len ( self . __str2bytes ( token ) ) ) : self . __mecab . mecab_lattice_set_boundary_constraint ( self . lattice , bpos , mark ) bpos += 1 self . __mecab . mecab_lattice_set_boundary_constraint ( self . lattice , bpos , self . MECAB_TOKEN_BOUNDARY ) elif self . _KW_FEATURE in kwargs : features = kwargs . get ( self . _KW_FEATURE , ( ) ) fd = { morph : self . __str2bytes ( feat ) for morph , feat in features } tokens = self . __split_features ( text , [ e [ 0 ] for e in features ] ) text = '' . join ( [ t [ 0 ] for t in tokens ] ) btext = self . __str2bytes ( text ) self . __mecab . mecab_lattice_set_sentence ( self . lattice , btext ) bpos = 0 for chunk , match in tokens : c = len ( self . __str2bytes ( chunk ) ) if match == True : self . __mecab . mecab_lattice_set_feature_constraint ( self . lattice , bpos , bpos + c , fd [ chunk ] ) bpos += c else : btext = self . __str2bytes ( text ) self . __mecab . mecab_lattice_set_sentence ( self . lattice , btext ) self . __mecab . mecab_parse_lattice ( self . tagger , self . lattice ) if n > 1 : res = self . __mecab . mecab_lattice_nbest_tostr ( self . lattice , n ) else : res = self . __mecab . mecab_lattice_tostr ( self . lattice ) if res != self . __ffi . NULL : raw = self . __ffi . string ( res ) return self . __bytes2str ( raw ) . strip ( ) else : err = self . __mecab . mecab_lattice_strerror ( self . lattice ) logger . error ( self . __bytes2str ( self . __ffi . string ( err ) ) ) raise MeCabError ( self . __bytes2str ( self . __ffi . string ( err ) ) )
Builds and returns the MeCab function for parsing Unicode text .
173
def parse ( self , text , ** kwargs ) : if text is None : logger . error ( self . _ERROR_EMPTY_STR ) raise MeCabError ( self . _ERROR_EMPTY_STR ) elif not isinstance ( text , str ) : logger . error ( self . _ERROR_NOTSTR ) raise MeCabError ( self . _ERROR_NOTSTR ) elif 'partial' in self . options and not text . endswith ( "\n" ) : logger . error ( self . _ERROR_MISSING_NL ) raise MeCabError ( self . _ERROR_MISSING_NL ) if self . _KW_BOUNDARY in kwargs : val = kwargs [ self . _KW_BOUNDARY ] if not isinstance ( val , self . _REGEXTYPE ) and not isinstance ( val , str ) : logger . error ( self . _ERROR_BOUNDARY ) raise MeCabError ( self . _ERROR_BOUNDARY ) elif self . _KW_FEATURE in kwargs : val = kwargs [ self . _KW_FEATURE ] if not isinstance ( val , tuple ) : logger . error ( self . _ERROR_FEATURE ) raise MeCabError ( self . _ERROR_FEATURE ) as_nodes = kwargs . get ( self . _KW_ASNODES , False ) if as_nodes : return self . __parse_tonodes ( text , ** kwargs ) else : return self . __parse_tostr ( text , ** kwargs )
Parse the given text and return result from MeCab .
174
def generate ( tagGroups , terms ) : rv = [ ] for pid in tagGroups : if pid not in terms . keys ( ) : continue groupData = terms [ pid ] groupName = "[%s] %s" % ( pid , groupData [ 'name' ] ) groupDesc = groupData [ 'desc' ] children = [ ] group = dict ( name = groupName , desc = groupDesc , set = children ) rv . append ( group ) for cid in groupData [ 'children' ] : cData = terms [ cid ] cName = "[%s] %s" % ( cid , cData [ 'name' ] ) cDesc = cData [ 'desc' ] child = dict ( name = cName , desc = cDesc ) children . append ( child ) return json . dumps ( rv , indent = 2 )
create Tag Groups and Child Tags using data from terms dict
175
def _handle_args ( self , cmd , args ) : if cmd == 'install' : if args . upgrade : if args . initdb or args . upgradedb : raise Stop ( 10 , ( 'Deprecated --initdb --upgradedb flags ' 'are incompatible with --upgrade' ) ) newinstall = None else : newinstall = True if args . managedb : if args . initdb or args . upgradedb : raise Stop ( 10 , ( 'Deprecated --initdb --upgradedb flags ' 'are incompatible with --managedb' ) ) args . initdb = True args . upgradedb = True else : if args . initdb or args . upgradedb : log . warn ( '--initdb and --upgradedb are deprecated, ' 'use --managedb' ) elif cmd == 'upgrade' : log . warn ( '"omero upgrade" is deprecated, use "omego install --upgrade"' ) cmd = 'install' args . upgrade = True newinstall = False else : raise Exception ( 'Unexpected command: %s' % cmd ) return args , newinstall
We need to support deprecated behaviour for now which makes this quite complicated
176
def handle_database ( self ) : if self . args . initdb or self . args . upgradedb : db = DbAdmin ( self . dir , None , self . args , self . external ) status = db . check ( ) log . debug ( 'OMERO database upgrade status: %s' , status ) else : log . warn ( 'OMERO database check disabled' ) return DB_INIT_NEEDED if status == DB_INIT_NEEDED : if self . args . initdb : log . debug ( 'Initialising OMERO database' ) db . init ( ) else : log . error ( 'OMERO database not found' ) raise Stop ( DB_INIT_NEEDED , 'Install/Upgrade failed: OMERO database not found' ) elif status == DB_UPGRADE_NEEDED : log . warn ( 'OMERO database exists but is out of date' ) if self . args . upgradedb : log . debug ( 'Upgrading OMERO database' ) db . upgrade ( ) else : raise Stop ( DB_UPGRADE_NEEDED , 'Pass --managedb or upgrade your OMERO database manually' ) else : assert status == DB_UPTODATE return status
Handle database initialisation and upgrade taking into account command line arguments
177
def run ( self , command ) : if isinstance ( command , basestring ) : command = command . split ( ) else : command = list ( command ) self . external . omero_cli ( command )
Runs a command as if from the command - line without the need for using popen or subprocess
178
def sort_schemas ( schemas ) : def keyfun ( v ) : x = SQL_SCHEMA_REGEXP . match ( v ) . groups ( ) return ( int ( x [ 0 ] ) , x [ 1 ] , int ( x [ 2 ] ) if x [ 2 ] else None , x [ 3 ] if x [ 3 ] else 'zzz' , int ( x [ 4 ] ) ) return sorted ( schemas , key = keyfun )
Sort a list of SQL schemas in order
179
def parse_schema_files ( files ) : f_dict = { } for f in files : root , ext = os . path . splitext ( f ) if ext != ".sql" : continue vto , vfrom = os . path . split ( root ) vto = os . path . split ( vto ) [ 1 ] if is_schema ( vto ) and is_schema ( vfrom ) : f_dict [ f ] = ( vfrom , vto ) return f_dict
Parse a list of SQL files and return a dictionary of valid schema files where each key is a valid schema file and the corresponding value is a tuple containing the source and the target schema .
180
def dump ( self ) : dumpfile = self . args . dumpfile if not dumpfile : db , env = self . get_db_args_env ( ) dumpfile = fileutils . timestamp_filename ( 'omero-database-%s' % db [ 'name' ] , 'pgdump' ) log . info ( 'Dumping database to %s' , dumpfile ) if not self . args . dry_run : self . pgdump ( '-Fc' , '-f' , dumpfile )
Dump the database using the postgres custom format
181
def get_db_args_env ( self ) : db = { 'name' : self . args . dbname , 'host' : self . args . dbhost , 'user' : self . args . dbuser , 'pass' : self . args . dbpass } if not self . args . no_db_config : try : c = self . external . get_config ( force = True ) except Exception as e : log . warn ( 'config.xml not found: %s' , e ) c = { } for k in db : try : db [ k ] = c [ 'omero.db.%s' % k ] except KeyError : log . info ( 'Failed to lookup parameter omero.db.%s, using %s' , k , db [ k ] ) if not db [ 'name' ] : raise Exception ( 'Database name required' ) env = os . environ . copy ( ) env [ 'PGPASSWORD' ] = db [ 'pass' ] return db , env
Get a dictionary of database connection parameters and create an environment for running postgres commands . Falls back to omego defaults .
182
def psql ( self , * psqlargs ) : db , env = self . get_db_args_env ( ) args = [ '-v' , 'ON_ERROR_STOP=on' , '-d' , db [ 'name' ] , '-h' , db [ 'host' ] , '-U' , db [ 'user' ] , '-w' , '-A' , '-t' ] + list ( psqlargs ) stdout , stderr = External . run ( 'psql' , args , capturestd = True , env = env ) if stderr : log . warn ( 'stderr: %s' , stderr ) log . debug ( 'stdout: %s' , stdout ) return stdout
Run a psql command
183
def pgdump ( self , * pgdumpargs ) : db , env = self . get_db_args_env ( ) args = [ '-d' , db [ 'name' ] , '-h' , db [ 'host' ] , '-U' , db [ 'user' ] , '-w' ] + list ( pgdumpargs ) stdout , stderr = External . run ( 'pg_dump' , args , capturestd = True , env = env ) if stderr : log . warn ( 'stderr: %s' , stderr ) log . debug ( 'stdout: %s' , stdout ) return stdout
Run a pg_dump command
184
def set_server_dir ( self , dir ) : self . dir = os . path . abspath ( dir ) config = os . path . join ( self . dir , 'etc' , 'grid' , 'config.xml' ) self . configured = os . path . exists ( config )
Set the directory of the server to be controlled
185
def get_config ( self , force = False ) : if not force and not self . has_config ( ) : raise Exception ( 'No config file' ) configxml = os . path . join ( self . dir , 'etc' , 'grid' , 'config.xml' ) if not os . path . exists ( configxml ) : raise Exception ( 'No config file' ) try : c = self . _omero . config . ConfigXml ( configxml , exclusive = False , read_only = True ) except TypeError : c = self . _omero . config . ConfigXml ( configxml , exclusive = False ) try : return c . as_map ( ) finally : c . close ( )
Returns a dictionary of all config.xml properties.
186
def setup_omero_cli ( self ) : if not self . dir : raise Exception ( 'No server directory set' ) if 'omero.cli' in sys . modules : raise Exception ( 'omero.cli can only be imported once' ) log . debug ( "Setting up omero CLI" ) lib = os . path . join ( self . dir , "lib" , "python" ) if not os . path . exists ( lib ) : raise Exception ( "%s does not exist!" % lib ) sys . path . insert ( 0 , lib ) import omero import omero . cli log . debug ( "Using omero CLI from %s" , omero . cli . __file__ ) self . cli = omero . cli . CLI ( ) self . cli . loadplugins ( ) self . _omero = omero
Imports the omero CLI module so that commands can be run directly . Note Python does not allow a module to be imported multiple times so this will only work with a single omero instance .
187
def setup_previous_omero_env ( self , olddir , savevarsfile ) : env = self . get_environment ( savevarsfile ) def addpath ( varname , p ) : if not os . path . exists ( p ) : raise Exception ( "%s does not exist!" % p ) current = env . get ( varname ) if current : env [ varname ] = p + os . pathsep + current else : env [ varname ] = p olddir = os . path . abspath ( olddir ) lib = os . path . join ( olddir , "lib" , "python" ) addpath ( "PYTHONPATH" , lib ) bin = os . path . join ( olddir , "bin" ) addpath ( "PATH" , bin ) self . old_env = env
Create a copy of the current environment for interacting with the current OMERO server installation
188
def omero_cli ( self , command ) : assert isinstance ( command , list ) if not self . cli : raise Exception ( 'omero.cli not initialised' ) log . info ( "Invoking CLI [current environment]: %s" , " " . join ( command ) ) self . cli . invoke ( command , strict = True )
Runs a command as if from the OMERO command - line without the need for using popen or subprocess .
189
def run ( exe , args , capturestd = False , env = None ) : command = [ exe ] + args if env : log . info ( "Executing [custom environment]: %s" , " " . join ( command ) ) else : log . info ( "Executing : %s" , " " . join ( command ) ) start = time . time ( ) outfile = None errfile = None if capturestd : outfile = tempfile . TemporaryFile ( ) errfile = tempfile . TemporaryFile ( ) r = subprocess . call ( command , env = env , stdout = outfile , stderr = errfile , shell = WINDOWS ) stdout = None stderr = None if capturestd : outfile . seek ( 0 ) stdout = outfile . read ( ) outfile . close ( ) errfile . seek ( 0 ) stderr = errfile . read ( ) errfile . close ( ) end = time . time ( ) if r != 0 : log . error ( "Failed [%.3f s]" , end - start ) raise RunException ( "Non-zero return code" , exe , args , r , stdout , stderr ) log . info ( "Completed [%.3f s]" , end - start ) return stdout , stderr
Runs an executable with an array of arguments optionally in the specified environment . Returns stdout and stderr
190
def string_support ( py3enc ) : if sys . version < '3' : def bytes2str ( b ) : return b def str2bytes ( s ) : return s else : def bytes2str ( b ) : return b . decode ( py3enc ) def str2bytes ( u ) : return u . encode ( py3enc ) return ( bytes2str , str2bytes )
Create byte - to - string and string - to - byte conversion functions for internal use .
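A short usage sketch assuming Python 3, where the returned helpers simply wrap encode and decode.
bytes2str, str2bytes = string_support('utf-8')
assert str2bytes('text') == b'text'
assert bytes2str(b'text') == 'text'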
191
def splitter_support ( py2enc ) : if sys . version < '3' : def _fn_sentence ( pattern , sentence ) : if REGEXTYPE == type ( pattern ) : if pattern . flags & re . UNICODE : return sentence . decode ( py2enc ) else : return sentence else : return sentence def _fn_token2str ( pattern ) : if REGEXTYPE == type ( pattern ) : if pattern . flags & re . UNICODE : def _fn ( token ) : return token . encode ( py2enc ) else : def _fn ( token ) : return token else : def _fn ( token ) : return token return _fn else : def _fn_sentence ( pattern , sentence ) : return sentence def _fn_token2str ( pattern ) : def _fn ( token ) : return token return _fn def _fn_tokenize_pattern ( text , pattern ) : pos = 0 sentence = _fn_sentence ( pattern , text ) postprocess = _fn_token2str ( pattern ) for m in re . finditer ( pattern , sentence ) : if pos < m . start ( ) : token = postprocess ( sentence [ pos : m . start ( ) ] ) yield ( token . strip ( ) , False ) pos = m . start ( ) token = postprocess ( sentence [ pos : m . end ( ) ] ) yield ( token . strip ( ) , True ) pos = m . end ( ) if pos < len ( sentence ) : token = postprocess ( sentence [ pos : ] ) yield ( token . strip ( ) , False ) def _fn_tokenize_features ( text , features ) : acc = [ ] acc . append ( ( text . strip ( ) , False ) ) for feat in features : for i , e in enumerate ( acc ) : if e [ 1 ] == False : tmp = list ( _fn_tokenize_pattern ( e [ 0 ] , feat ) ) if len ( tmp ) > 0 : acc . pop ( i ) acc [ i : i ] = tmp return acc return _fn_tokenize_pattern , _fn_tokenize_features
Create tokenizer for use in boundary constraint parsing .
192
def upsert ( self , doc , namespace , timestamp , update_spec = None ) : index , doc_type = self . _index_and_mapping ( namespace ) doc_id = u ( doc . pop ( "_id" ) ) metadata = { 'ns' : namespace , '_ts' : timestamp } action = { '_op_type' : 'index' , '_index' : index , '_type' : doc_type , '_id' : doc_id , '_source' : self . _formatter . format_document ( doc ) } meta_action = { '_op_type' : 'index' , '_index' : self . meta_index_name , '_type' : self . meta_type , '_id' : doc_id , '_source' : bson . json_util . dumps ( metadata ) } self . index ( action , meta_action , doc , update_spec ) doc [ '_id' ] = doc_id
Insert a document into Elasticsearch .
193
def bulk_upsert ( self , docs , namespace , timestamp ) : def docs_to_upsert ( ) : doc = None for doc in docs : index , doc_type = self . _index_and_mapping ( namespace ) doc_id = u ( doc . pop ( "_id" ) ) document_action = { '_index' : index , '_type' : doc_type , '_id' : doc_id , '_source' : self . _formatter . format_document ( doc ) } document_meta = { '_index' : self . meta_index_name , '_type' : self . meta_type , '_id' : doc_id , '_source' : { 'ns' : namespace , '_ts' : timestamp } } yield document_action yield document_meta if doc is None : raise errors . EmptyDocsError ( "Cannot upsert an empty sequence of " "documents into Elastic Search" ) try : kw = { } if self . chunk_size > 0 : kw [ 'chunk_size' ] = self . chunk_size responses = streaming_bulk ( client = self . elastic , actions = docs_to_upsert ( ) , ** kw ) for ok , resp in responses : if not ok : LOG . error ( "Could not bulk-upsert document " "into ElasticSearch: %r" % resp ) if self . auto_commit_interval == 0 : self . commit ( ) except errors . EmptyDocsError : pass
Insert multiple documents into Elasticsearch .
194
def remove ( self , document_id , namespace , timestamp ) : index , doc_type = self . _index_and_mapping ( namespace ) action = { '_op_type' : 'delete' , '_index' : index , '_type' : doc_type , '_id' : u ( document_id ) } meta_action = { '_op_type' : 'delete' , '_index' : self . meta_index_name , '_type' : self . meta_type , '_id' : u ( document_id ) } self . index ( action , meta_action )
Remove a document from Elasticsearch .
195
def send_buffered_operations ( self ) : with self . lock : try : action_buffer = self . BulkBuffer . get_buffer ( ) if action_buffer : successes , errors = bulk ( self . elastic , action_buffer ) LOG . debug ( "Bulk request finished, successfully sent %d " "operations" , successes ) if errors : LOG . error ( "Bulk request finished with errors: %r" , errors ) except es_exceptions . ElasticsearchException : LOG . exception ( "Bulk request failed with exception" )
Send buffered operations to Elasticsearch .
196
def get_last_doc ( self ) : try : result = self . elastic . search ( index = self . meta_index_name , body = { "query" : { "match_all" : { } } , "sort" : [ { "_ts" : "desc" } ] , } , size = 1 ) [ "hits" ] [ "hits" ] for r in result : r [ '_source' ] [ '_id' ] = r [ '_id' ] return r [ '_source' ] except es_exceptions . RequestError : return None
Get the most recently modified document from Elasticsearch .
197
def parse_type_signature ( sig ) : match = TYPE_SIG_RE . match ( sig . strip ( ) ) if not match : raise RuntimeError ( 'Type signature invalid, got ' + sig ) groups = match . groups ( ) typ = groups [ 0 ] generic_types = groups [ 1 ] if not generic_types : generic_types = [ ] else : generic_types = split_sig ( generic_types [ 1 : - 1 ] ) is_array = ( groups [ 2 ] is not None ) return typ , generic_types , is_array
Parse a type signature
198
def parse_attr_signature ( sig ) : match = ATTR_SIG_RE . match ( sig . strip ( ) ) if not match : raise RuntimeError ( 'Attribute signature invalid, got ' + sig ) name , _ , params = match . groups ( ) if params is not None and params . strip ( ) != '' : params = split_sig ( params ) params = [ parse_param_signature ( x ) for x in params ] else : params = [ ] return ( name , params )
Parse an attribute signature
199
def get_msdn_ref ( name ) : in_msdn = False if name in MSDN_VALUE_TYPES : name = MSDN_VALUE_TYPES [ name ] in_msdn = True if name . startswith ( 'System.' ) : in_msdn = True if in_msdn : link = name . split ( '<' ) [ 0 ] if link in MSDN_LINK_MAP : link = MSDN_LINK_MAP [ link ] else : link = link . lower ( ) url = 'https://msdn.microsoft.com/en-us/library/' + link + '.aspx' node = nodes . reference ( name , shorten_type ( name ) ) node [ 'refuri' ] = url node [ 'reftitle' ] = name return node else : return None
Try and create a reference to a type on MSDN