interactive_prompt.py

from framework.manager import Manager
from state.devstate import DevState
from PyInquirer import prompt, Separator
from pprint import pprint
import prompt_questions as questions
from inspect import signature
from uuid import UUID


def generate_context_question(ctx_type, services):
    """Build a PyInquirer question listing the operations available in the current context."""
    choices = [
        s.__name__.replace('_', ' ') for s in services
    ]
    choices = sorted(choices)
    choices.append(Separator())
    choices.append("close context")
    ctx_question = [
        {
            'type': 'list',
            'name': 'op',
            'message': f'Currently in context {ctx_type.__name__}, which operation would you like to perform?',
            'choices': choices,
            # map the displayed choice back to the underlying method name
            'filter': lambda x: x.replace(' ', '_')
        }
    ]
    return ctx_question
def main():
    state = DevState()
    man = Manager(state)
    while True:
        # Pick the question set and dispatch target depending on whether a model
        # is selected and whether a context is currently open.
        if man.current_model is not None and man.current_context is None:
            answer = prompt(questions.MODEL_SELECTED)
            ctx = man
        elif man.current_model is not None and man.current_context is not None:
            qs = generate_context_question(type(man.current_context), man.get_services())
            answer = prompt(qs)
            if answer['op'] == 'close_context':
                man.close_context()
                continue
            else:
                ctx = man.current_context
        else:
            answer = prompt(questions.MODEL_MGMT)
            ctx = man
        if answer['op'] == 'exit':
            break
        else:
            # Look up the chosen operation and prompt for each of its parameters,
            # using the parameter annotations to decide how to ask and convert.
            method = getattr(ctx, answer['op'])
            args_questions = []
            types = {}
            for p in signature(method).parameters.values():
                types[p.name] = p.annotation  # can't use filter in question dict, doesn't work for some reason...
                if p.annotation == UUID:
                    args_questions.append({
                        'type': 'list',
                        'name': p.name,
                        'message': f'{p.name.replace("_", " ")}?',
                        'choices': list(man.get_models()),
                        'filter': lambda x: state.read_value(state.read_dict(state.read_root(), x))
                    })
                else:
                    args_questions.append({
                        'type': 'input',
                        'name': p.name,
                        'message': f'{p.name.replace("_", " ")}?',
                        'filter': lambda x: False if x.lower() == 'false' else x
                    })
            args = prompt(args_questions)
            args = {k: types[k](v) if len(v) > 0 else None for k, v in args.items()}
            try:
                output = method(**args)
                if output is not None:
                    try:
                        # Print iterable results as a sorted bullet list; fall back to a
                        # single bullet for scalars (strings are treated as scalars).
                        if isinstance(output, str):
                            raise TypeError
                        output = list(output)
                        if len(output) > 0:
                            for o in sorted(output):
                                print(f"\u2022 {o}")
                    except TypeError:
                        print(f"\u2022 {output}")
            except RuntimeError as e:
                print(e)
if __name__ == '__main__':
    print(r"""Welcome to...
     __  ____      _____  
    |  \/  \ \    / /__ \ 
    | \  / |\ \  / /   ) |
    | |\/| | \ \/ /   / / 
    | |  | |  \  /   / /_ 
    |_|  |_|   \/   |____|
    """)
    main()
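
The prompt_questions module imported above is not shown on this page. Purely as an assumption, inferred from how main() consumes MODEL_MGMT and MODEL_SELECTED (PyInquirer question lists whose 'op' answer either names a method on the Manager or is the literal 'exit'), a minimal sketch of that module might look like the following; the operation names in the choices lists are hypothetical placeholders, not the project's actual Manager methods.

# prompt_questions.py -- hypothetical sketch, not the module from this repository
from PyInquirer import Separator

MODEL_MGMT = [
    {
        'type': 'list',
        'name': 'op',
        'message': 'No model selected, which operation would you like to perform?',
        # placeholder names: every choice except 'exit' must match a Manager method
        'choices': ['instantiate model', 'select model', Separator(), 'exit'],
        'filter': lambda x: x.replace(' ', '_')
    }
]

MODEL_SELECTED = [
    {
        'type': 'list',
        'name': 'op',
        'message': 'Model selected, which operation would you like to perform?',
        'choices': ['open context', 'close model', Separator(), 'exit'],
        'filter': lambda x: x.replace(' ', '_')
    }
]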