changed naming of messaging methods
run.py
@@ -1,3 +1,4 @@
+"""A simple script to run a Flow that can be used for development and debugging."""
 import os
 
 import hydra
@@ -20,7 +21,7 @@ from aiflows.workers import run_dispatch_worker_thread
 CACHING_PARAMETERS.do_caching = False # Set to True in order to disable caching
 # clear_cache() # Uncomment this line to clear the cache
 
-logging.set_verbosity_debug()
+# logging.set_verbosity_debug()
 
 
 dependencies = [
@@ -105,10 +106,10 @@ if __name__ == "__main__":
     )
 
     #option2: use the proxy_flow
-    #input_message = proxy_flow.
+    #input_message = proxy_flow.package_input_message(data = data)
 
     #7. ~~~ Run inference ~~~
-    future = proxy_flow.
+    future = proxy_flow.get_reply_future(input_message)
 
     #uncomment this line if you would like to get the full message back
     #reply_message = future.get_message()
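For context, a minimal sketch of how the renamed messaging methods fit together after this change. The helper name `run_inference` and the example payload are illustrative only; `proxy_flow` stands for the served-flow proxy that run.py constructs earlier in the script, and the three calls (`package_input_message`, `get_reply_future`, `get_message`) are the ones shown in the diff above:

```python
# Illustrative sketch only -- not part of the diff. `proxy_flow` is assumed to be
# the flow proxy built earlier in run.py; `run_inference` is a hypothetical helper.

def run_inference(proxy_flow, data: dict):
    """Send `data` to a flow via the renamed messaging methods and wait for the reply."""
    # Wrap the raw payload into an input message (new naming: package_input_message).
    input_message = proxy_flow.package_input_message(data=data)

    # Dispatch the message and obtain a future for the reply (new naming: get_reply_future).
    future = proxy_flow.get_reply_future(input_message)

    # Block until the flow replies and return the full reply message.
    return future.get_message()


# Hypothetical usage, mirroring the variables used in run.py:
# reply_message = run_inference(proxy_flow, data={"id": 0, "question": "..."})
```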