@@ -62,7 +62,8 @@ def process_entities(entities):
6262@click .option ('--max-token-count' , '-c' , default = 1024 , help = 'max number of processed CSVs to send per query (default 1024)' )
6363@click .option ('--max-buffer-size' , '-b' , default = 2048 , help = 'max buffer size in megabytes (default 2048)' )
6464@click .option ('--max-token-size' , '-t' , default = 500 , help = 'max size of each token in megabytes (default 500, max 512)' )
65- def bulk_insert (graph , host , port , password , nodes , nodes_with_label , relations , relations_with_type , separator , enforce_schema , skip_invalid_nodes , skip_invalid_edges , quote , max_token_count , max_buffer_size , max_token_size ):
65+ @click .option ('--index' , '-i' , multiple = True , help = 'Label:Property on which to create an index' )
66+ def bulk_insert (graph , host , port , password , nodes , nodes_with_label , relations , relations_with_type , separator , enforce_schema , skip_invalid_nodes , skip_invalid_edges , quote , max_token_count , max_buffer_size , max_token_size , index ):
6667 if sys .version_info [0 ] < 3 :
6768 raise Exception ("Python 3 is required for the RedisGraph bulk loader." )
6869
@@ -115,6 +116,16 @@ def bulk_insert(graph, host, port, password, nodes, nodes_with_label, relations,
115116 end_time = timer ()
116117 query_buf .report_completion (end_time - start_time )
117118
119+ for i in index :
120+ l , p = i .split (":" )
121+ print ("Creating Index on Label: %s, Property: %s" % (l , p ))
122+ try :
123+ index_create = client .execute_command ("GRAPH.QUERY" , graph , "CREATE INDEX ON :%s(%s)" % (l , p ))
124+ for z in index_create :
125+ print (z [0 ].decode ("utf-8" ) )
126+ except redis .exceptions .ResponseError as e :
127+ print ("Unable to create Index on Label: %s, Property: %s" % (l , p ))
128+ print (e )
118129
119130if __name__ == '__main__' :
120131 bulk_insert ()
0 commit comments