From 2b3664ce36b4c9d1acf7319c9ff6e1b52f597ef2 Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Tue, 23 Apr 2024 09:29:59 +0100 Subject: [PATCH 1/8] #2476 Add proof of concept yaml combining notebook --- .pre-commit-config.yaml | 1 + .../scenario_with_placeholders/greens_1.yaml | 98 +++ .../scenario_with_placeholders/greens_2.yaml | 49 ++ .../scenario_with_placeholders/reds_1.yaml | 32 + .../scenario_with_placeholders/reds_2.yaml | 32 + .../scenario_with_placeholders/scenario.yaml | 788 ++++++++++++++++++ .../scenario_with_placeholders/schedule.yaml | 18 + .../notebooks/Scenario-Placeholders.ipynb | 142 ++++ src/primaite/notebooks/variables.yaml | 7 + 9 files changed, 1167 insertions(+) create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml create mode 100644 src/primaite/notebooks/Scenario-Placeholders.ipynb create mode 100644 src/primaite/notebooks/variables.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 56dc6424..91230171 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,6 +3,7 @@ repos: rev: v4.4.0 hooks: - id: check-yaml + exclude: scenario_with_placeholders/ - id: end-of-file-fixer - id: trailing-whitespace - id: check-added-large-files diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml new file mode 100644 index 00000000..2702cbe6 --- /dev/null +++ 
b/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml @@ -0,0 +1,98 @@ +greens: &greens + - ref: green_client_2 + team: GREEN + type: ProbabilisticAgent + agent_settings: + action_probabilities: + 0: 0.3 + 1: 0.6 + 2: 0.1 + observation_space: null + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client_2 + applications: + - application_name: WebBrowser + - application_name: DatabaseClient + max_folders_per_node: 1 + max_files_per_folder: 1 + max_services_per_node: 1 + max_applications_per_node: 2 + action_map: + 0: + action: DONOTHING + options: {} + 1: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 0 + 2: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 1 + + reward_function: + reward_components: + - type: WEBPAGE_UNAVAILABLE_PENALTY + weight: 0.25 + options: + node_hostname: client_2 + - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY + weight: 0.05 + options: + node_hostname: client_2 + + - ref: green_client_1 + team: GREEN + type: ProbabilisticAgent + agent_settings: + action_probabilities: + 0: 0.3 + 1: 0.6 + 2: 0.1 + observation_space: null + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client_1 + applications: + - application_name: WebBrowser + - application_name: DatabaseClient + max_folders_per_node: 1 + max_files_per_folder: 1 + max_services_per_node: 1 + max_applications_per_node: 2 + action_map: + 0: + action: DONOTHING + options: {} + 1: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 0 + 2: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 1 + + reward_function: + reward_components: + - type: WEBPAGE_UNAVAILABLE_PENALTY + weight: 0.25 + options: + node_hostname: client_1 + - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY + weight: 0.05 + options: + node_hostname: client_1 
diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml new file mode 100644 index 00000000..e0c33656 --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml @@ -0,0 +1,49 @@ +greens: &greens + - ref: green_client_2 + team: GREEN + type: ProbabilisticAgent + agent_settings: + action_probabilities: + 0: 0.3 + 1: 0.6 + 2: 0.1 + observation_space: null + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client_2 + applications: + - application_name: WebBrowser + - application_name: DatabaseClient + max_folders_per_node: 1 + max_files_per_folder: 1 + max_services_per_node: 1 + max_applications_per_node: 2 + action_map: + 0: + action: DONOTHING + options: {} + 1: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 0 + 2: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 1 + + reward_function: + reward_components: + - type: WEBPAGE_UNAVAILABLE_PENALTY + weight: 0.25 + options: + node_hostname: client_2 + - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY + weight: 0.05 + options: + node_hostname: client_2 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml new file mode 100644 index 00000000..f41fca8d --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml @@ -0,0 +1,32 @@ +reds: &reds + - ref: attacker_1 + team: RED + type: RedDatabaseCorruptingAgent + + observation_space: null + + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client_1 + applications: + - application_name: DataManipulationBot + - node_name: client_2 + applications: + - application_name: DataManipulationBot + 
max_folders_per_node: 1 + max_files_per_folder: 1 + max_services_per_node: 1 + + reward_function: + reward_components: + - type: DUMMY + + agent_settings: # options specific to this particular agent type, basically args of __init__(self) + start_settings: + start_step: 25 + frequency: 20 + variance: 5 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml new file mode 100644 index 00000000..13e1dd3b --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml @@ -0,0 +1,32 @@ +reds: &reds + - ref: attacker_2 + team: RED + type: RedDatabaseCorruptingAgent + + observation_space: null + + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client_1 + applications: + - application_name: DataManipulationBot + - node_name: client_2 + applications: + - application_name: DataManipulationBot + max_folders_per_node: 1 + max_files_per_folder: 1 + max_services_per_node: 1 + + reward_function: + reward_components: + - type: DUMMY + + agent_settings: # options specific to this particular agent type, basically args of __init__(self) + start_settings: + start_step: 10 + frequency: 4 + variance: 1 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml new file mode 100644 index 00000000..426b79c7 --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml @@ -0,0 +1,788 @@ +io_settings: + save_agent_actions: true + save_step_metadata: false + save_pcap_logs: false + save_sys_logs: false + + +game: + max_episode_length: 128 + ports: + - HTTP + - POSTGRES_SERVER + protocols: + - ICMP + - TCP + - UDP + thresholds: + nmne: + high: 10 + medium: 5 + low: 0 + +agents: + - *greens + - *reds + - *blue(s) + + - ref: defender + team: BLUE + type: 
ProxyAgent + observation_space: + type: CUSTOM + options: + components: + - type: NODES + label: NODES + options: + hosts: + - hostname: domain_controller + - hostname: web_server + services: + - service_name: WebServer + - hostname: database_server + folders: + - folder_name: database + files: + - file_name: database.db + - hostname: backup_server + - hostname: security_suite + - hostname: client_1 + - hostname: client_2 + num_services: 1 + num_applications: 0 + num_folders: 1 + num_files: 1 + num_nics: 2 + include_num_access: false + include_nmne: true + routers: + - hostname: router_1 + num_ports: 0 + ip_list: + - 192.168.1.10 + - 192.168.1.12 + - 192.168.1.14 + - 192.168.1.16 + - 192.168.1.110 + - 192.168.10.21 + - 192.168.10.22 + - 192.168.10.110 + wildcard_list: + - 0.0.0.1 + port_list: + - 80 + - 5432 + protocol_list: + - ICMP + - TCP + - UDP + num_rules: 10 + + - type: LINKS + label: LINKS + options: + link_references: + - router_1:eth-1<->switch_1:eth-8 + - router_1:eth-2<->switch_2:eth-8 + - switch_1:eth-1<->domain_controller:eth-1 + - switch_1:eth-2<->web_server:eth-1 + - switch_1:eth-3<->database_server:eth-1 + - switch_1:eth-4<->backup_server:eth-1 + - switch_1:eth-7<->security_suite:eth-1 + - switch_2:eth-1<->client_1:eth-1 + - switch_2:eth-2<->client_2:eth-1 + - switch_2:eth-7<->security_suite:eth-2 + - type: "NONE" + label: ICS + options: {} + action_space: + action_list: + - type: DONOTHING + - type: NODE_SERVICE_SCAN + - type: NODE_SERVICE_STOP + - type: NODE_SERVICE_START + - type: NODE_SERVICE_PAUSE + - type: NODE_SERVICE_RESUME + - type: NODE_SERVICE_RESTART + - type: NODE_SERVICE_DISABLE + - type: NODE_SERVICE_ENABLE + - type: NODE_SERVICE_FIX + - type: NODE_FILE_SCAN + - type: NODE_FILE_CHECKHASH + - type: NODE_FILE_DELETE + - type: NODE_FILE_REPAIR + - type: NODE_FILE_RESTORE + - type: NODE_FOLDER_SCAN + - type: NODE_FOLDER_CHECKHASH + - type: NODE_FOLDER_REPAIR + - type: NODE_FOLDER_RESTORE + - type: NODE_OS_SCAN + - type: NODE_SHUTDOWN + - 
type: NODE_STARTUP + - type: NODE_RESET + - type: ROUTER_ACL_ADDRULE + - type: ROUTER_ACL_REMOVERULE + - type: HOST_NIC_ENABLE + - type: HOST_NIC_DISABLE + + action_map: + 0: + action: DONOTHING + options: {} + # scan webapp service + 1: + action: NODE_SERVICE_SCAN + options: + node_id: 1 + service_id: 0 + # stop webapp service + 2: + action: NODE_SERVICE_STOP + options: + node_id: 1 + service_id: 0 + # start webapp service + 3: + action: "NODE_SERVICE_START" + options: + node_id: 1 + service_id: 0 + 4: + action: "NODE_SERVICE_PAUSE" + options: + node_id: 1 + service_id: 0 + 5: + action: "NODE_SERVICE_RESUME" + options: + node_id: 1 + service_id: 0 + 6: + action: "NODE_SERVICE_RESTART" + options: + node_id: 1 + service_id: 0 + 7: + action: "NODE_SERVICE_DISABLE" + options: + node_id: 1 + service_id: 0 + 8: + action: "NODE_SERVICE_ENABLE" + options: + node_id: 1 + service_id: 0 + 9: # check database.db file + action: "NODE_FILE_SCAN" + options: + node_id: 2 + folder_id: 0 + file_id: 0 + 10: + action: "NODE_FILE_SCAN" # CHECKHASH replaced by SCAN - but the behaviour is the same in this context. + options: + node_id: 2 + folder_id: 0 + file_id: 0 + 11: + action: "NODE_FILE_DELETE" + options: + node_id: 2 + folder_id: 0 + file_id: 0 + 12: + action: "NODE_FILE_REPAIR" + options: + node_id: 2 + folder_id: 0 + file_id: 0 + 13: + action: "NODE_SERVICE_FIX" + options: + node_id: 2 + service_id: 0 + 14: + action: "NODE_FOLDER_SCAN" + options: + node_id: 2 + folder_id: 0 + 15: + action: "NODE_FOLDER_SCAN" # CHECKHASH replaced by SCAN - but the behaviour is the same in this context. 
+ options: + node_id: 2 + folder_id: 0 + 16: + action: "NODE_FOLDER_REPAIR" + options: + node_id: 2 + folder_id: 0 + 17: + action: "NODE_FOLDER_RESTORE" + options: + node_id: 2 + folder_id: 0 + 18: + action: "NODE_OS_SCAN" + options: + node_id: 0 + 19: + action: "NODE_SHUTDOWN" + options: + node_id: 0 + 20: + action: NODE_STARTUP + options: + node_id: 0 + 21: + action: NODE_RESET + options: + node_id: 0 + 22: + action: "NODE_OS_SCAN" + options: + node_id: 1 + 23: + action: "NODE_SHUTDOWN" + options: + node_id: 1 + 24: + action: NODE_STARTUP + options: + node_id: 1 + 25: + action: NODE_RESET + options: + node_id: 1 + 26: # old action num: 18 + action: "NODE_OS_SCAN" + options: + node_id: 2 + 27: + action: "NODE_SHUTDOWN" + options: + node_id: 2 + 28: + action: NODE_STARTUP + options: + node_id: 2 + 29: + action: NODE_RESET + options: + node_id: 2 + 30: + action: "NODE_OS_SCAN" + options: + node_id: 3 + 31: + action: "NODE_SHUTDOWN" + options: + node_id: 3 + 32: + action: NODE_STARTUP + options: + node_id: 3 + 33: + action: NODE_RESET + options: + node_id: 3 + 34: + action: "NODE_OS_SCAN" + options: + node_id: 4 + 35: + action: "NODE_SHUTDOWN" + options: + node_id: 4 + 36: + action: NODE_STARTUP + options: + node_id: 4 + 37: + action: NODE_RESET + options: + node_id: 4 + 38: + action: "NODE_OS_SCAN" + options: + node_id: 5 + 39: # old action num: 19 # shutdown client 1 + action: "NODE_SHUTDOWN" + options: + node_id: 5 + 40: # old action num: 20 + action: NODE_STARTUP + options: + node_id: 5 + 41: # old action num: 21 + action: NODE_RESET + options: + node_id: 5 + 42: + action: "NODE_OS_SCAN" + options: + node_id: 6 + 43: + action: "NODE_SHUTDOWN" + options: + node_id: 6 + 44: + action: NODE_STARTUP + options: + node_id: 6 + 45: + action: NODE_RESET + options: + node_id: 6 + + 46: # old action num: 22 # "ACL: ADDRULE - Block outgoing traffic from client 1" + action: "ROUTER_ACL_ADDRULE" + options: + target_router_nodename: router_1 + position: 1 + permission: 2 + 
source_ip_id: 7 # client 1 + dest_ip_id: 1 # ALL + source_port_id: 1 + dest_port_id: 1 + protocol_id: 1 + source_wildcard_id: 0 + dest_wildcard_id: 0 + 47: # old action num: 23 # "ACL: ADDRULE - Block outgoing traffic from client 2" + action: "ROUTER_ACL_ADDRULE" + options: + target_router_nodename: router_1 + position: 2 + permission: 2 + source_ip_id: 8 # client 2 + dest_ip_id: 1 # ALL + source_port_id: 1 + dest_port_id: 1 + protocol_id: 1 + source_wildcard_id: 0 + dest_wildcard_id: 0 + 48: # old action num: 24 # block tcp traffic from client 1 to web app + action: "ROUTER_ACL_ADDRULE" + options: + target_router_nodename: router_1 + position: 3 + permission: 2 + source_ip_id: 7 # client 1 + dest_ip_id: 3 # web server + source_port_id: 1 + dest_port_id: 1 + protocol_id: 3 + source_wildcard_id: 0 + dest_wildcard_id: 0 + 49: # old action num: 25 # block tcp traffic from client 2 to web app + action: "ROUTER_ACL_ADDRULE" + options: + target_router_nodename: router_1 + position: 4 + permission: 2 + source_ip_id: 8 # client 2 + dest_ip_id: 3 # web server + source_port_id: 1 + dest_port_id: 1 + protocol_id: 3 + source_wildcard_id: 0 + dest_wildcard_id: 0 + 50: # old action num: 26 + action: "ROUTER_ACL_ADDRULE" + options: + target_router_nodename: router_1 + position: 5 + permission: 2 + source_ip_id: 7 # client 1 + dest_ip_id: 4 # database + source_port_id: 1 + dest_port_id: 1 + protocol_id: 3 + source_wildcard_id: 0 + dest_wildcard_id: 0 + 51: # old action num: 27 + action: "ROUTER_ACL_ADDRULE" + options: + target_router_nodename: router_1 + position: 6 + permission: 2 + source_ip_id: 8 # client 2 + dest_ip_id: 4 # database + source_port_id: 1 + dest_port_id: 1 + protocol_id: 3 + source_wildcard_id: 0 + dest_wildcard_id: 0 + 52: # old action num: 28 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 0 + 53: # old action num: 29 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 1 + 54: # 
old action num: 30 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 2 + 55: # old action num: 31 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 3 + 56: # old action num: 32 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 4 + 57: # old action num: 33 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 5 + 58: # old action num: 34 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 6 + 59: # old action num: 35 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 7 + 60: # old action num: 36 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 8 + 61: # old action num: 37 + action: "ROUTER_ACL_REMOVERULE" + options: + target_router_nodename: router_1 + position: 9 + 62: # old action num: 38 + action: "HOST_NIC_DISABLE" + options: + node_id: 0 + nic_id: 0 + 63: # old action num: 39 + action: "HOST_NIC_ENABLE" + options: + node_id: 0 + nic_id: 0 + 64: # old action num: 40 + action: "HOST_NIC_DISABLE" + options: + node_id: 1 + nic_id: 0 + 65: # old action num: 41 + action: "HOST_NIC_ENABLE" + options: + node_id: 1 + nic_id: 0 + 66: # old action num: 42 + action: "HOST_NIC_DISABLE" + options: + node_id: 2 + nic_id: 0 + 67: # old action num: 43 + action: "HOST_NIC_ENABLE" + options: + node_id: 2 + nic_id: 0 + 68: # old action num: 44 + action: "HOST_NIC_DISABLE" + options: + node_id: 3 + nic_id: 0 + 69: # old action num: 45 + action: "HOST_NIC_ENABLE" + options: + node_id: 3 + nic_id: 0 + 70: # old action num: 46 + action: "HOST_NIC_DISABLE" + options: + node_id: 4 + nic_id: 0 + 71: # old action num: 47 + action: "HOST_NIC_ENABLE" + options: + node_id: 4 + nic_id: 0 + 72: # old action num: 48 + action: "HOST_NIC_DISABLE" + options: + node_id: 4 + nic_id: 1 + 73: # old action num: 49 + 
action: "HOST_NIC_ENABLE" + options: + node_id: 4 + nic_id: 1 + 74: # old action num: 50 + action: "HOST_NIC_DISABLE" + options: + node_id: 5 + nic_id: 0 + 75: # old action num: 51 + action: "HOST_NIC_ENABLE" + options: + node_id: 5 + nic_id: 0 + 76: # old action num: 52 + action: "HOST_NIC_DISABLE" + options: + node_id: 6 + nic_id: 0 + 77: # old action num: 53 + action: "HOST_NIC_ENABLE" + options: + node_id: 6 + nic_id: 0 + + + + options: + nodes: + - node_name: domain_controller + - node_name: web_server + applications: + - application_name: DatabaseClient + services: + - service_name: WebServer + - node_name: database_server + folders: + - folder_name: database + files: + - file_name: database.db + services: + - service_name: DatabaseService + - node_name: backup_server + - node_name: security_suite + - node_name: client_1 + - node_name: client_2 + + max_folders_per_node: 2 + max_files_per_folder: 2 + max_services_per_node: 2 + max_nics_per_node: 8 + max_acl_rules: 10 + ip_list: + - 192.168.1.10 + - 192.168.1.12 + - 192.168.1.14 + - 192.168.1.16 + - 192.168.1.110 + - 192.168.10.21 + - 192.168.10.22 + - 192.168.10.110 + reward_function: + reward_components: + - type: DATABASE_FILE_INTEGRITY + weight: 0.40 + options: + node_hostname: database_server + folder_name: database + file_name: database.db + + agent_settings: + flatten_obs: true + + +simulation: + network: + nmne_config: + capture_nmne: true + nmne_capture_keywords: + - DELETE + nodes: + + - hostname: router_1 + type: router + num_ports: 5 + ports: + 1: + ip_address: 192.168.1.1 + subnet_mask: 255.255.255.0 + 2: + ip_address: 192.168.10.1 + subnet_mask: 255.255.255.0 + acl: + 18: + action: PERMIT + src_port: POSTGRES_SERVER + dst_port: POSTGRES_SERVER + 19: + action: PERMIT + src_port: DNS + dst_port: DNS + 20: + action: PERMIT + src_port: FTP + dst_port: FTP + 21: + action: PERMIT + src_port: HTTP + dst_port: HTTP + 22: + action: PERMIT + src_port: ARP + dst_port: ARP + 23: + action: PERMIT + protocol: 
ICMP + + - hostname: switch_1 + type: switch + num_ports: 8 + + - hostname: switch_2 + type: switch + num_ports: 8 + + - hostname: domain_controller + type: server + ip_address: 192.168.1.10 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + services: + - type: DNSServer + options: + domain_mapping: + arcd.com: 192.168.1.12 # web server + + - hostname: web_server + type: server + ip_address: 192.168.1.12 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + dns_server: 192.168.1.10 + services: + - type: WebServer + applications: + - type: DatabaseClient + options: + db_server_ip: 192.168.1.14 + + + - hostname: database_server + type: server + ip_address: 192.168.1.14 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + dns_server: 192.168.1.10 + services: + - type: DatabaseService + options: + backup_server_ip: 192.168.1.16 + - type: FTPClient + + - hostname: backup_server + type: server + ip_address: 192.168.1.16 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + dns_server: 192.168.1.10 + services: + - type: FTPServer + + - hostname: security_suite + type: server + ip_address: 192.168.1.110 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + dns_server: 192.168.1.10 + network_interfaces: + 2: # unfortunately this number is currently meaningless, they're just added in order and take up the next available slot + ip_address: 192.168.10.110 + subnet_mask: 255.255.255.0 + + - hostname: client_1 + type: computer + ip_address: 192.168.10.21 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.10.1 + dns_server: 192.168.1.10 + applications: + - type: DataManipulationBot + options: + port_scan_p_of_success: 0.8 + data_manipulation_p_of_success: 0.8 + payload: "DELETE" + server_ip: 192.168.1.14 + - type: WebBrowser + options: + target_url: http://arcd.com/users/ + - type: DatabaseClient + options: + db_server_ip: 192.168.1.14 + services: + - type: DNSClient + + - hostname: client_2 + type: computer + ip_address: 
192.168.10.22 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.10.1 + dns_server: 192.168.1.10 + applications: + - type: WebBrowser + options: + target_url: http://arcd.com/users/ + - type: DataManipulationBot + options: + port_scan_p_of_success: 0.8 + data_manipulation_p_of_success: 0.8 + payload: "DELETE" + server_ip: 192.168.1.14 + - type: DatabaseClient + options: + db_server_ip: 192.168.1.14 + services: + - type: DNSClient + + links: + - endpoint_a_hostname: router_1 + endpoint_a_port: 1 + endpoint_b_hostname: switch_1 + endpoint_b_port: 8 + - endpoint_a_hostname: router_1 + endpoint_a_port: 2 + endpoint_b_hostname: switch_2 + endpoint_b_port: 8 + - endpoint_a_hostname: switch_1 + endpoint_a_port: 1 + endpoint_b_hostname: domain_controller + endpoint_b_port: 1 + - endpoint_a_hostname: switch_1 + endpoint_a_port: 2 + endpoint_b_hostname: web_server + endpoint_b_port: 1 + - endpoint_a_hostname: switch_1 + endpoint_a_port: 3 + endpoint_b_hostname: database_server + endpoint_b_port: 1 + - endpoint_a_hostname: switch_1 + endpoint_a_port: 4 + endpoint_b_hostname: backup_server + endpoint_b_port: 1 + - endpoint_a_hostname: switch_1 + endpoint_a_port: 7 + endpoint_b_hostname: security_suite + endpoint_b_port: 1 + - endpoint_a_hostname: switch_2 + endpoint_a_port: 1 + endpoint_b_hostname: client_1 + endpoint_b_port: 1 + - endpoint_a_hostname: switch_2 + endpoint_a_port: 2 + endpoint_b_hostname: client_2 + endpoint_b_port: 1 + - endpoint_a_hostname: switch_2 + endpoint_a_port: 7 + endpoint_b_hostname: security_suite + endpoint_b_port: 2 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml new file mode 100644 index 00000000..866c9895 --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml @@ -0,0 +1,18 @@ +base_scenario: scenario.yaml +schedule: + 0: + green: greens_1.yaml + red: reds_1.yaml + 1: + green: 
greens_1.yaml + red: reds_2.yaml + 2: + green: greens_2.yaml + red: reds_1.yaml + 3: + green: greens_2.yaml + red: reds_2.yaml + +# touch base with container to see what they've implemented for training schedule and evaluation schedule - for naming convention consistency +# when you exceed the number of episodes defined in the yaml, raise a warning and loop back to the beginning +# provide minimal functionality for checking compatibility- but we will assume that the user will correctly specify the blue/red/green agents and environment. diff --git a/src/primaite/notebooks/Scenario-Placeholders.ipynb b/src/primaite/notebooks/Scenario-Placeholders.ipynb new file mode 100644 index 00000000..67835999 --- /dev/null +++ b/src/primaite/notebooks/Scenario-Placeholders.ipynb @@ -0,0 +1,142 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "from pprint import pprint\n", + "from pathlib import Path\n", + "from typing import Sequence\n", + "from primaite.session.environment import PrimaiteGymEnv\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "froot = Path('/home/cade/repos/PrimAITE/src/primaite/config/_package_data/scenario_with_placeholders/')\n", + "sch = froot / 'schedule.yaml'\n", + "fp = froot / 'scenario.yaml'\n", + "fpr1 = froot / 'reds_1.yaml'\n", + "fpr2 = froot / 'reds_2.yaml'\n", + "fpg1 = froot / 'greens_1.yaml'\n", + "fpg2 = froot / 'greens_2.yaml'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "\n", + "\n", + "with open(sch,'r') as f:\n", + " schedule = yaml.safe_load(f)\n", + "\n", + "base_scenario_path = froot / schedule['base_scenario']\n", + "episodes = [v for n,v in schedule['schedule'].items()]\n", + "all_episode_paths = {x for ep in episodes for x in ep.values()}\n", + "episode_data = {fp:open(froot / fp, 'r').read() for 
fp in all_episode_paths}\n", + "base_scenario = open(base_scenario_path).read()\n", + "\n", + "def get_ep_config(ep_num):\n", + " episode = episodes[ep_num]\n", + " # print(episode.values())\n", + " parsed_cfg = yaml.safe_load('\\n'.join([episode_data[v] for v in episode.values()] + [base_scenario]))\n", + " flat_agents_list = []\n", + " for a in parsed_cfg['agents']:\n", + " if isinstance(a,Sequence):\n", + " flat_agents_list.extend(a)\n", + " else:\n", + " flat_agents_list.append(a)\n", + " parsed_cfg['agents'] = flat_agents_list\n", + " return parsed_cfg\n", + "\n", + "\n", + "pprint(len(get_ep_config(0)['agents']))\n", + "# pprint(get_ep_config(0)['agents'])\n", + "\n", + "# conf_data = open('test_data_1.yaml','r').read()\n", + "# variables = open('variables.yaml','r').read()\n", + "\n", + "# yaml.safe_load(f\"{variables}\\n{conf_data}\")\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gym = PrimaiteGymEnv(game_config=get_ep_config(0))\n", + "print(list(gym.game.agents.keys()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gym = PrimaiteGymEnv(game_config=get_ep_config(1))\n", + "print(list(gym.game.agents.keys()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gym = PrimaiteGymEnv(game_config=get_ep_config(2))\n", + "print(list(gym.game.agents.keys()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gym = PrimaiteGymEnv(game_config=get_ep_config(3))\n", + "print(list(gym.game.agents.keys()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": 
"ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/primaite/notebooks/variables.yaml b/src/primaite/notebooks/variables.yaml new file mode 100644 index 00000000..cb4637e0 --- /dev/null +++ b/src/primaite/notebooks/variables.yaml @@ -0,0 +1,7 @@ +placeholder_1: &placeholder_1 + - a + - b + +placeholder_2: &placeholder_2 + - c + - d From 28c8b7c9d98f8c16043279abf07ebbfa9014d1ec Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Tue, 23 Apr 2024 11:51:50 +0100 Subject: [PATCH 2/8] #2476 Get episode schedule working --- .../scenario_with_placeholders/greens_1.yaml | 4 +- .../scenario_with_placeholders/greens_2.yaml | 2 +- .../scenario_with_placeholders/reds_1.yaml | 2 +- .../scenario_with_placeholders/reds_2.yaml | 2 +- .../scenario_with_placeholders/scenario.yaml | 1 - .../scenario_with_placeholders/schedule.yaml | 16 +-- .../notebooks/Scenario-Placeholders.ipynb | 48 +++++++ src/primaite/session/environment.py | 21 ++- src/primaite/session/episode_schedule.py | 127 ++++++++++++++++++ 9 files changed, 198 insertions(+), 25 deletions(-) create mode 100644 src/primaite/session/episode_schedule.py diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml index 2702cbe6..e152f23f 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml @@ -1,5 +1,5 @@ greens: &greens - - ref: green_client_2 + - ref: green_A team: GREEN type: ProbabilisticAgent agent_settings: @@ -48,7 +48,7 @@ greens: &greens options: node_hostname: client_2 - - ref: green_client_1 + - ref: green_B team: GREEN type: ProbabilisticAgent agent_settings: diff --git 
a/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml index e0c33656..87c8ffe3 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml @@ -1,5 +1,5 @@ greens: &greens - - ref: green_client_2 + - ref: green_C team: GREEN type: ProbabilisticAgent agent_settings: diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml index f41fca8d..9019f6c6 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml @@ -1,5 +1,5 @@ reds: &reds - - ref: attacker_1 + - ref: red_A team: RED type: RedDatabaseCorruptingAgent diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml index 13e1dd3b..c3304e17 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml @@ -1,5 +1,5 @@ reds: &reds - - ref: attacker_2 + - ref: red_B team: RED type: RedDatabaseCorruptingAgent diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml index 426b79c7..b3d47f78 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml @@ -23,7 +23,6 @@ game: agents: - *greens - *reds - - *blue(s) - ref: defender team: BLUE diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml 
b/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml index 866c9895..2d26eb31 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml @@ -1,17 +1,17 @@ base_scenario: scenario.yaml schedule: 0: - green: greens_1.yaml - red: reds_1.yaml + - greens_1.yaml + - reds_1.yaml 1: - green: greens_1.yaml - red: reds_2.yaml + - greens_1.yaml + - reds_2.yaml 2: - green: greens_2.yaml - red: reds_1.yaml + - greens_2.yaml + - reds_1.yaml 3: - green: greens_2.yaml - red: reds_2.yaml + - greens_2.yaml + - reds_2.yaml # touch base with container to see what they've implemented for training schedule and evaluation schedule - for naming convention consistency # when you exceed the number of episodes defined in the yaml, raise a warning and loop back to the beginning diff --git a/src/primaite/notebooks/Scenario-Placeholders.ipynb b/src/primaite/notebooks/Scenario-Placeholders.ipynb index 67835999..9de34a81 100644 --- a/src/primaite/notebooks/Scenario-Placeholders.ipynb +++ b/src/primaite/notebooks/Scenario-Placeholders.ipynb @@ -110,6 +110,54 @@ "print(list(gym.game.agents.keys()))" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from primaite.session.environment import PrimaiteGymEnv" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env_2 = PrimaiteGymEnv(game_config='/home/cade/repos/PrimAITE/src/primaite/config/_package_data/scenario_with_placeholders')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(10):\n", + " print(env_2.episode_counter)\n", + " print(list(env_2.game.agents.keys()))\n", + " env_2.reset()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env = 
PrimaiteGymEnv(game_config='/home/cade/repos/PrimAITE/src/primaite/config/_package_data/data_manipulation.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sum([[1,2],[3,4]])" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/src/primaite/session/environment.py b/src/primaite/session/environment.py index 9311e1f7..dea6b1dc 100644 --- a/src/primaite/session/environment.py +++ b/src/primaite/session/environment.py @@ -1,6 +1,7 @@ import copy import json -from typing import Any, Dict, Optional, SupportsFloat, Tuple +from os import PathLike +from typing import Any, Dict, Optional, SupportsFloat, Tuple, Union import gymnasium from gymnasium.core import ActType, ObsType @@ -9,6 +10,7 @@ from ray.rllib.env.multi_agent_env import MultiAgentEnv from primaite import getLogger from primaite.game.agent.interface import ProxyAgent from primaite.game.game import PrimaiteGame +from primaite.session.episode_schedule import build_scheduler, EpisodeScheduler from primaite.session.io import PrimaiteIO from primaite.simulator import SIM_OUTPUT @@ -23,17 +25,14 @@ class PrimaiteGymEnv(gymnasium.Env): assumptions about the agent list always having a list of length 1. """ - def __init__(self, game_config: Dict): + def __init__(self, game_config: Union[Dict, str, PathLike]): """Initialise the environment.""" super().__init__() - self.io = PrimaiteIO.from_config(game_config.get("io_settings", {})) + self.episode_scheduler: EpisodeScheduler = build_scheduler(game_config) + """Object that returns a config corresponding to the current episode.""" + self.io = PrimaiteIO.from_config(self.episode_scheduler(0).get("io_settings", {})) """Handles IO for the environment. This produces sys logs, agent logs, etc.""" - - self.game_config: Dict = game_config - """PrimaiteGame definition. 
This can be changed between episodes to enable curriculum learning.""" - self.io = PrimaiteIO.from_config(game_config.get("io_settings", {})) - """Handles IO for the environment. This produces sys logs, agent logs, etc.""" - self.game: PrimaiteGame = PrimaiteGame.from_config(copy.deepcopy(self.game_config)) + self.game: PrimaiteGame = PrimaiteGame.from_config(self.episode_scheduler(0)) """Current game.""" self._agent_name = next(iter(self.game.rl_agents)) """Name of the RL agent. Since there should only be one RL agent we can just pull the first and only key.""" @@ -94,9 +93,9 @@ class PrimaiteGymEnv(gymnasium.Env): if self.io.settings.save_agent_actions: all_agent_actions = {name: agent.action_history for name, agent in self.game.agents.items()} self.io.write_agent_actions(agent_actions=all_agent_actions, episode=self.episode_counter) - self.game: PrimaiteGame = PrimaiteGame.from_config(cfg=copy.deepcopy(self.game_config)) - self.game.setup_for_episode(episode=self.episode_counter) self.episode_counter += 1 + self.game: PrimaiteGame = PrimaiteGame.from_config(cfg=self.episode_scheduler(self.episode_counter)) + self.game.setup_for_episode(episode=self.episode_counter) state = self.game.get_sim_state() self.game.update_agents(state=state) next_obs = self._get_obs() diff --git a/src/primaite/session/episode_schedule.py b/src/primaite/session/episode_schedule.py new file mode 100644 index 00000000..2245e2b5 --- /dev/null +++ b/src/primaite/session/episode_schedule.py @@ -0,0 +1,127 @@ +import copy +from abc import ABC, abstractmethod +from os import PathLike +from pathlib import Path +from typing import Dict, List, Mapping, Sequence, Union + +import pydantic + +from primaite import getLogger + +_LOGGER = getLogger(__name__) +import warnings +from itertools import chain + +import yaml + + +class EpisodeScheduler(pydantic.BaseModel, ABC): + """ + Episode schedulers provide functionality to select different scenarios and game setups for each episode. 
+ + This is useful when implementing advanced RL concepts like curriculum learning and domain randomisation. + """ + + @abstractmethod + def __call__(self, episode_num: int) -> Dict: + """Return the config that should be used during this episode.""" + + +class ConstantEpisodeScheduler(EpisodeScheduler): + """ + The constant episode schedule simply provides the same game setup every time. + """ + + config: Dict + + def __call__(self, episode_num: int) -> Dict: + """Return the same config every time.""" + return copy.deepcopy(self.config) + + +class EpisodeListScheduler(EpisodeScheduler): + """The episode list u""" + + schedule: Mapping[int, List[str]] + """Mapping from episode number to list of filenames""" + episode_data: Mapping[str, str] + """Mapping from filename to yaml string.""" + base_scenario: str + """yaml string containing the base scenario.""" + + _exceeded_episode_list: bool = False + """ + Flag that's set to true when attempting to keep generating episodes after schedule runs out. + + When this happens, we loop back to the beginning, but a warning is raised. + """ + + # TODO: be careful about off-by-one errors with episode number- should it start at 0 or 1? + def __call__(self, episode_num: int) -> Dict: + if episode_num > len(self.schedule): + if not self._exceeded_episode_list: + self._exceeded_episode_list = True + _LOGGER.warn( + f"Running episode {episode_num} but the schedule only defines" + f"{len(self.schedule)} episodes. 
Looping back to the beginning" + ) + # not sure if we should be using a traditional warning, or a _LOGGER.warning + episode_num = episode_num % len(self.schedule) + + filenames_to_join = self.schedule[episode_num] + yaml_data_to_join = [self.episode_data[fn] for fn in filenames_to_join] + [self.base_scenario] + joined_yaml = "\n".join(yaml_data_to_join) + parsed_cfg = yaml.safe_load(joined_yaml) + + # Unfortunately, using placeholders like this is slightly hacky, so we have to flatten the list of agents + flat_agents_list = [] + for a in parsed_cfg["agents"]: + if isinstance(a, Sequence): + flat_agents_list.extend(a) + else: + flat_agents_list.append(a) + parsed_cfg["agents"] = flat_agents_list + + return parsed_cfg + + +def build_scheduler(config: Union[str, Path, Dict]) -> EpisodeScheduler: + """ + Convenience method to build an EpisodeScheduler with a dict, file path, or folder path. + + If a path to a folder is provided, it will be treated as a list of game scenarios. + Otherwise, if a dict or a single file is provided, it will be treated as a constant game scenario. 
+ """ + # If we get a dict, return a constant episode schedule that repeats that one config forever + if isinstance(config, Dict): + return ConstantEpisodeScheduler(config=config) + + # Cast string to Path + if isinstance(config, str): + config = Path(config) + + if not config.exists(): + raise FileNotFoundError(f"Provided config path {config} could not be found.") + + if config.is_file(): + with open(config, "r") as f: + cfg_data = yaml.safe_load(f) + return ConstantEpisodeScheduler(config=cfg_data) + + if not config.is_dir(): + raise RuntimeError("Something went wrong while building Primaite config.") + + root = config + schedule_path = root / "schedule.yaml" + + with open(schedule_path, "r") as f: + schedule = yaml.safe_load(f) + + base_scenario_path = root / schedule["base_scenario"] + files_to_load = set(chain.from_iterable(schedule["schedule"].values())) + + episode_data = {fp: (root / fp).read_text() for fp in files_to_load} + + return EpisodeListScheduler( + schedule=schedule["schedule"], episode_data=episode_data, base_scenario=base_scenario_path.read_text() + ) From a92898d00191789bd502977fd3846bf8d164f523 Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Thu, 25 Apr 2024 13:25:26 +0100 Subject: [PATCH 3/8] #2476 Finalise explanation notebook for episode schedule --- .../scenario_with_placeholders/greens_0.yaml | 2 + .../scenario_with_placeholders/greens_1.yaml | 76 +- .../scenario_with_placeholders/greens_2.yaml | 29 +- .../scenario_with_placeholders/reds_0.yaml | 2 + .../scenario_with_placeholders/reds_1.yaml | 16 +- .../scenario_with_placeholders/reds_2.yaml | 14 +- .../scenario_with_placeholders/scenario.yaml | 779 ++---------------- .../scenario_with_placeholders/schedule.yaml | 14 +- .../notebooks/Scenario-Placeholders.ipynb | 190 ----- .../notebooks/Using-Episode-Schedules.ipynb | 372 +++++++++ src/primaite/session/episode_schedule.py | 17 +- 11 files changed, 489 insertions(+), 1022 deletions(-) create mode 100644 
src/primaite/config/_package_data/scenario_with_placeholders/greens_0.yaml create mode 100644 src/primaite/config/_package_data/scenario_with_placeholders/reds_0.yaml delete mode 100644 src/primaite/notebooks/Scenario-Placeholders.ipynb create mode 100644 src/primaite/notebooks/Using-Episode-Schedules.ipynb diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/greens_0.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_0.yaml new file mode 100644 index 00000000..f31c52fa --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/greens_0.yaml @@ -0,0 +1,2 @@ +# No green agents present +greens: &greens [] diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml index e152f23f..98d2392a 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/greens_1.yaml @@ -1,12 +1,11 @@ -greens: &greens +agents: &greens - ref: green_A team: GREEN type: ProbabilisticAgent agent_settings: action_probabilities: - 0: 0.3 - 1: 0.6 - 2: 0.1 + 0: 0.2 + 1: 0.8 observation_space: null action_space: action_list: @@ -14,14 +13,9 @@ greens: &greens - type: NODE_APPLICATION_EXECUTE options: nodes: - - node_name: client_2 + - node_name: client applications: - - application_name: WebBrowser - application_name: DatabaseClient - max_folders_per_node: 1 - max_files_per_folder: 1 - max_services_per_node: 1 - max_applications_per_node: 2 action_map: 0: action: DONOTHING @@ -31,68 +25,10 @@ greens: &greens options: node_id: 0 application_id: 0 - 2: - action: NODE_APPLICATION_EXECUTE - options: - node_id: 0 - application_id: 1 reward_function: reward_components: - - type: WEBPAGE_UNAVAILABLE_PENALTY - weight: 0.25 - options: - node_hostname: client_2 - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY - weight: 0.05 + weight: 1.0 
options: - node_hostname: client_2 - - - ref: green_B - team: GREEN - type: ProbabilisticAgent - agent_settings: - action_probabilities: - 0: 0.3 - 1: 0.6 - 2: 0.1 - observation_space: null - action_space: - action_list: - - type: DONOTHING - - type: NODE_APPLICATION_EXECUTE - options: - nodes: - - node_name: client_1 - applications: - - application_name: WebBrowser - - application_name: DatabaseClient - max_folders_per_node: 1 - max_files_per_folder: 1 - max_services_per_node: 1 - max_applications_per_node: 2 - action_map: - 0: - action: DONOTHING - options: {} - 1: - action: NODE_APPLICATION_EXECUTE - options: - node_id: 0 - application_id: 0 - 2: - action: NODE_APPLICATION_EXECUTE - options: - node_id: 0 - application_id: 1 - - reward_function: - reward_components: - - type: WEBPAGE_UNAVAILABLE_PENALTY - weight: 0.25 - options: - node_hostname: client_1 - - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY - weight: 0.05 - options: - node_hostname: client_1 + node_hostname: client diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml index 87c8ffe3..17a5977b 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/greens_2.yaml @@ -1,12 +1,11 @@ -greens: &greens - - ref: green_C +agents: &greens + - ref: green_B team: GREEN type: ProbabilisticAgent agent_settings: action_probabilities: - 0: 0.3 - 1: 0.6 - 2: 0.1 + 0: 0.95 + 1: 0.05 observation_space: null action_space: action_list: @@ -14,14 +13,9 @@ greens: &greens - type: NODE_APPLICATION_EXECUTE options: nodes: - - node_name: client_2 + - node_name: client applications: - - application_name: WebBrowser - application_name: DatabaseClient - max_folders_per_node: 1 - max_files_per_folder: 1 - max_services_per_node: 1 - max_applications_per_node: 2 action_map: 0: action: DONOTHING @@ -31,19 +25,10 @@ greens: 
&greens options: node_id: 0 application_id: 0 - 2: - action: NODE_APPLICATION_EXECUTE - options: - node_id: 0 - application_id: 1 reward_function: reward_components: - - type: WEBPAGE_UNAVAILABLE_PENALTY - weight: 0.25 - options: - node_hostname: client_2 - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY - weight: 0.05 + weight: 1.0 options: - node_hostname: client_2 + node_hostname: client diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_0.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_0.yaml new file mode 100644 index 00000000..878aba97 --- /dev/null +++ b/src/primaite/config/_package_data/scenario_with_placeholders/reds_0.yaml @@ -0,0 +1,2 @@ +# No red agents present +reds: &reds [] diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml index 9019f6c6..31675a0b 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/reds_1.yaml @@ -11,22 +11,16 @@ reds: &reds - type: NODE_APPLICATION_EXECUTE options: nodes: - - node_name: client_1 + - node_name: client applications: - application_name: DataManipulationBot - - node_name: client_2 - applications: - - application_name: DataManipulationBot - max_folders_per_node: 1 - max_files_per_folder: 1 - max_services_per_node: 1 reward_function: reward_components: - type: DUMMY - agent_settings: # options specific to this particular agent type, basically args of __init__(self) + agent_settings: start_settings: - start_step: 25 - frequency: 20 - variance: 5 + start_step: 10 + frequency: 10 + variance: 0 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml index c3304e17..c5572b89 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml +++ 
b/src/primaite/config/_package_data/scenario_with_placeholders/reds_2.yaml @@ -11,22 +11,16 @@ reds: &reds - type: NODE_APPLICATION_EXECUTE options: nodes: - - node_name: client_1 + - node_name: client applications: - application_name: DataManipulationBot - - node_name: client_2 - applications: - - application_name: DataManipulationBot - max_folders_per_node: 1 - max_files_per_folder: 1 - max_services_per_node: 1 reward_function: reward_components: - type: DUMMY - agent_settings: # options specific to this particular agent type, basically args of __init__(self) + agent_settings: start_settings: - start_step: 10 - frequency: 4 + start_step: 3 + frequency: 2 variance: 1 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml index b3d47f78..81848b2d 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/scenario.yaml @@ -34,551 +34,86 @@ agents: - type: NODES label: NODES options: + routers: [] hosts: - - hostname: domain_controller - - hostname: web_server - services: - - service_name: WebServer - - hostname: database_server - folders: - - folder_name: database - files: - - file_name: database.db - - hostname: backup_server - - hostname: security_suite - - hostname: client_1 - - hostname: client_2 + - hostname: client + - hostname: server num_services: 1 - num_applications: 0 + num_applications: 1 num_folders: 1 num_files: 1 - num_nics: 2 + num_nics: 1 include_num_access: false include_nmne: true - routers: - - hostname: router_1 - num_ports: 0 - ip_list: - - 192.168.1.10 - - 192.168.1.12 - - 192.168.1.14 - - 192.168.1.16 - - 192.168.1.110 - - 192.168.10.21 - - 192.168.10.22 - - 192.168.10.110 - wildcard_list: - - 0.0.0.1 - port_list: - - 80 - - 5432 - protocol_list: - - ICMP - - TCP - - UDP - num_rules: 10 - type: LINKS label: LINKS options: link_references: - 
- router_1:eth-1<->switch_1:eth-8 - - router_1:eth-2<->switch_2:eth-8 - - switch_1:eth-1<->domain_controller:eth-1 - - switch_1:eth-2<->web_server:eth-1 - - switch_1:eth-3<->database_server:eth-1 - - switch_1:eth-4<->backup_server:eth-1 - - switch_1:eth-7<->security_suite:eth-1 - - switch_2:eth-1<->client_1:eth-1 - - switch_2:eth-2<->client_2:eth-1 - - switch_2:eth-7<->security_suite:eth-2 - - type: "NONE" - label: ICS - options: {} + - client:eth-1<->switch_1:eth-1 + - server:eth-1<->switch_1:eth-2 + action_space: action_list: - type: DONOTHING - - type: NODE_SERVICE_SCAN - - type: NODE_SERVICE_STOP - - type: NODE_SERVICE_START - - type: NODE_SERVICE_PAUSE - - type: NODE_SERVICE_RESUME - - type: NODE_SERVICE_RESTART - - type: NODE_SERVICE_DISABLE - - type: NODE_SERVICE_ENABLE - - type: NODE_SERVICE_FIX - - type: NODE_FILE_SCAN - - type: NODE_FILE_CHECKHASH - - type: NODE_FILE_DELETE - - type: NODE_FILE_REPAIR - - type: NODE_FILE_RESTORE - - type: NODE_FOLDER_SCAN - - type: NODE_FOLDER_CHECKHASH - - type: NODE_FOLDER_REPAIR - - type: NODE_FOLDER_RESTORE - - type: NODE_OS_SCAN - type: NODE_SHUTDOWN - type: NODE_STARTUP - - type: NODE_RESET - - type: ROUTER_ACL_ADDRULE - - type: ROUTER_ACL_REMOVERULE - type: HOST_NIC_ENABLE - type: HOST_NIC_DISABLE - action_map: - 0: - action: DONOTHING - options: {} - # scan webapp service - 1: - action: NODE_SERVICE_SCAN - options: - node_id: 1 - service_id: 0 - # stop webapp service - 2: - action: NODE_SERVICE_STOP - options: - node_id: 1 - service_id: 0 - # start webapp service - 3: - action: "NODE_SERVICE_START" - options: - node_id: 1 - service_id: 0 - 4: - action: "NODE_SERVICE_PAUSE" - options: - node_id: 1 - service_id: 0 - 5: - action: "NODE_SERVICE_RESUME" - options: - node_id: 1 - service_id: 0 - 6: - action: "NODE_SERVICE_RESTART" - options: - node_id: 1 - service_id: 0 - 7: - action: "NODE_SERVICE_DISABLE" - options: - node_id: 1 - service_id: 0 - 8: - action: "NODE_SERVICE_ENABLE" - options: - node_id: 1 - service_id: 
0 - 9: # check database.db file - action: "NODE_FILE_SCAN" - options: - node_id: 2 - folder_id: 0 - file_id: 0 - 10: - action: "NODE_FILE_SCAN" # CHECKHASH replaced by SCAN - but the behaviour is the same in this context. - options: - node_id: 2 - folder_id: 0 - file_id: 0 - 11: - action: "NODE_FILE_DELETE" - options: - node_id: 2 - folder_id: 0 - file_id: 0 - 12: - action: "NODE_FILE_REPAIR" - options: - node_id: 2 - folder_id: 0 - file_id: 0 - 13: - action: "NODE_SERVICE_FIX" - options: - node_id: 2 - service_id: 0 - 14: - action: "NODE_FOLDER_SCAN" - options: - node_id: 2 - folder_id: 0 - 15: - action: "NODE_FOLDER_SCAN" # CHECKHASH replaced by SCAN - but the behaviour is the same in this context. - options: - node_id: 2 - folder_id: 0 - 16: - action: "NODE_FOLDER_REPAIR" - options: - node_id: 2 - folder_id: 0 - 17: - action: "NODE_FOLDER_RESTORE" - options: - node_id: 2 - folder_id: 0 - 18: - action: "NODE_OS_SCAN" - options: - node_id: 0 - 19: - action: "NODE_SHUTDOWN" - options: - node_id: 0 - 20: - action: NODE_STARTUP - options: - node_id: 0 - 21: - action: NODE_RESET - options: - node_id: 0 - 22: - action: "NODE_OS_SCAN" - options: - node_id: 1 - 23: - action: "NODE_SHUTDOWN" - options: - node_id: 1 - 24: - action: NODE_STARTUP - options: - node_id: 1 - 25: - action: NODE_RESET - options: - node_id: 1 - 26: # old action num: 18 - action: "NODE_OS_SCAN" - options: - node_id: 2 - 27: - action: "NODE_SHUTDOWN" - options: - node_id: 2 - 28: - action: NODE_STARTUP - options: - node_id: 2 - 29: - action: NODE_RESET - options: - node_id: 2 - 30: - action: "NODE_OS_SCAN" - options: - node_id: 3 - 31: - action: "NODE_SHUTDOWN" - options: - node_id: 3 - 32: - action: NODE_STARTUP - options: - node_id: 3 - 33: - action: NODE_RESET - options: - node_id: 3 - 34: - action: "NODE_OS_SCAN" - options: - node_id: 4 - 35: - action: "NODE_SHUTDOWN" - options: - node_id: 4 - 36: - action: NODE_STARTUP - options: - node_id: 4 - 37: - action: NODE_RESET - options: - node_id: 4 - 
38: - action: "NODE_OS_SCAN" - options: - node_id: 5 - 39: # old action num: 19 # shutdown client 1 - action: "NODE_SHUTDOWN" - options: - node_id: 5 - 40: # old action num: 20 - action: NODE_STARTUP - options: - node_id: 5 - 41: # old action num: 21 - action: NODE_RESET - options: - node_id: 5 - 42: - action: "NODE_OS_SCAN" - options: - node_id: 6 - 43: - action: "NODE_SHUTDOWN" - options: - node_id: 6 - 44: - action: NODE_STARTUP - options: - node_id: 6 - 45: - action: NODE_RESET - options: - node_id: 6 - - 46: # old action num: 22 # "ACL: ADDRULE - Block outgoing traffic from client 1" - action: "ROUTER_ACL_ADDRULE" - options: - target_router_nodename: router_1 - position: 1 - permission: 2 - source_ip_id: 7 # client 1 - dest_ip_id: 1 # ALL - source_port_id: 1 - dest_port_id: 1 - protocol_id: 1 - source_wildcard_id: 0 - dest_wildcard_id: 0 - 47: # old action num: 23 # "ACL: ADDRULE - Block outgoing traffic from client 2" - action: "ROUTER_ACL_ADDRULE" - options: - target_router_nodename: router_1 - position: 2 - permission: 2 - source_ip_id: 8 # client 2 - dest_ip_id: 1 # ALL - source_port_id: 1 - dest_port_id: 1 - protocol_id: 1 - source_wildcard_id: 0 - dest_wildcard_id: 0 - 48: # old action num: 24 # block tcp traffic from client 1 to web app - action: "ROUTER_ACL_ADDRULE" - options: - target_router_nodename: router_1 - position: 3 - permission: 2 - source_ip_id: 7 # client 1 - dest_ip_id: 3 # web server - source_port_id: 1 - dest_port_id: 1 - protocol_id: 3 - source_wildcard_id: 0 - dest_wildcard_id: 0 - 49: # old action num: 25 # block tcp traffic from client 2 to web app - action: "ROUTER_ACL_ADDRULE" - options: - target_router_nodename: router_1 - position: 4 - permission: 2 - source_ip_id: 8 # client 2 - dest_ip_id: 3 # web server - source_port_id: 1 - dest_port_id: 1 - protocol_id: 3 - source_wildcard_id: 0 - dest_wildcard_id: 0 - 50: # old action num: 26 - action: "ROUTER_ACL_ADDRULE" - options: - target_router_nodename: router_1 - position: 5 - 
permission: 2 - source_ip_id: 7 # client 1 - dest_ip_id: 4 # database - source_port_id: 1 - dest_port_id: 1 - protocol_id: 3 - source_wildcard_id: 0 - dest_wildcard_id: 0 - 51: # old action num: 27 - action: "ROUTER_ACL_ADDRULE" - options: - target_router_nodename: router_1 - position: 6 - permission: 2 - source_ip_id: 8 # client 2 - dest_ip_id: 4 # database - source_port_id: 1 - dest_port_id: 1 - protocol_id: 3 - source_wildcard_id: 0 - dest_wildcard_id: 0 - 52: # old action num: 28 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 0 - 53: # old action num: 29 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 1 - 54: # old action num: 30 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 2 - 55: # old action num: 31 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 3 - 56: # old action num: 32 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 4 - 57: # old action num: 33 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 5 - 58: # old action num: 34 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 6 - 59: # old action num: 35 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 7 - 60: # old action num: 36 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 8 - 61: # old action num: 37 - action: "ROUTER_ACL_REMOVERULE" - options: - target_router_nodename: router_1 - position: 9 - 62: # old action num: 38 - action: "HOST_NIC_DISABLE" - options: - node_id: 0 - nic_id: 0 - 63: # old action num: 39 - action: "HOST_NIC_ENABLE" - options: - node_id: 0 - nic_id: 0 - 64: # old action num: 40 - action: "HOST_NIC_DISABLE" - options: - node_id: 1 - nic_id: 0 - 65: # old action num: 41 - action: 
"HOST_NIC_ENABLE" - options: - node_id: 1 - nic_id: 0 - 66: # old action num: 42 - action: "HOST_NIC_DISABLE" - options: - node_id: 2 - nic_id: 0 - 67: # old action num: 43 - action: "HOST_NIC_ENABLE" - options: - node_id: 2 - nic_id: 0 - 68: # old action num: 44 - action: "HOST_NIC_DISABLE" - options: - node_id: 3 - nic_id: 0 - 69: # old action num: 45 - action: "HOST_NIC_ENABLE" - options: - node_id: 3 - nic_id: 0 - 70: # old action num: 46 - action: "HOST_NIC_DISABLE" - options: - node_id: 4 - nic_id: 0 - 71: # old action num: 47 - action: "HOST_NIC_ENABLE" - options: - node_id: 4 - nic_id: 0 - 72: # old action num: 48 - action: "HOST_NIC_DISABLE" - options: - node_id: 4 - nic_id: 1 - 73: # old action num: 49 - action: "HOST_NIC_ENABLE" - options: - node_id: 4 - nic_id: 1 - 74: # old action num: 50 - action: "HOST_NIC_DISABLE" - options: - node_id: 5 - nic_id: 0 - 75: # old action num: 51 - action: "HOST_NIC_ENABLE" - options: - node_id: 5 - nic_id: 0 - 76: # old action num: 52 - action: "HOST_NIC_DISABLE" - options: - node_id: 6 - nic_id: 0 - 77: # old action num: 53 - action: "HOST_NIC_ENABLE" - options: - node_id: 6 - nic_id: 0 - - - + 0: + action: DONOTHING + options: {} + 1: + action: NODE_SHUTDOWN + options: + node_id: 0 + 2: + action: NODE_SHUTDOWN + options: + node_id: 1 + 3: + action: NODE_STARTUP + options: + node_id: 0 + 4: + action: NODE_STARTUP + options: + node_id: 1 + 5: + action: HOST_NIC_DISABLE + options: + node_id: 0 + nic_id: 0 + 6: + action: HOST_NIC_DISABLE + options: + node_id: 1 + nic_id: 0 + 7: + action: HOST_NIC_ENABLE + options: + node_id: 0 + nic_id: 0 + 8: + action: HOST_NIC_ENABLE + options: + node_id: 1 + nic_id: 0 options: nodes: - - node_name: domain_controller - - node_name: web_server - applications: - - application_name: DatabaseClient - services: - - service_name: WebServer - - node_name: database_server - folders: - - folder_name: database - files: - - file_name: database.db - services: - - service_name: DatabaseService - - 
node_name: backup_server - - node_name: security_suite - - node_name: client_1 - - node_name: client_2 + - node_name: client + - node_name: server - max_folders_per_node: 2 - max_files_per_folder: 2 - max_services_per_node: 2 - max_nics_per_node: 8 - max_acl_rules: 10 + max_folders_per_node: 0 + max_files_per_folder: 0 + max_services_per_node: 0 + max_nics_per_node: 1 + max_acl_rules: 0 ip_list: - - 192.168.1.10 - - 192.168.1.12 - - 192.168.1.14 - - 192.168.1.16 - - 192.168.1.110 - - 192.168.10.21 - - 192.168.10.22 - - 192.168.10.110 + - 192.168.1.2 + - 192.168.1.3 + reward_function: reward_components: - type: DATABASE_FILE_INTEGRITY @@ -589,199 +124,45 @@ agents: file_name: database.db agent_settings: - flatten_obs: true + flatten_obs: false simulation: network: - nmne_config: - capture_nmne: true - nmne_capture_keywords: - - DELETE nodes: - - - hostname: router_1 - type: router - num_ports: 5 - ports: - 1: - ip_address: 192.168.1.1 - subnet_mask: 255.255.255.0 - 2: - ip_address: 192.168.10.1 - subnet_mask: 255.255.255.0 - acl: - 18: - action: PERMIT - src_port: POSTGRES_SERVER - dst_port: POSTGRES_SERVER - 19: - action: PERMIT - src_port: DNS - dst_port: DNS - 20: - action: PERMIT - src_port: FTP - dst_port: FTP - 21: - action: PERMIT - src_port: HTTP - dst_port: HTTP - 22: - action: PERMIT - src_port: ARP - dst_port: ARP - 23: - action: PERMIT - protocol: ICMP + - hostname: client + type: computer + ip_address: 192.168.1.2 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + applications: + - type: DatabaseClient + options: + db_server_ip: 192.168.1.3 + - type: DataManipulationBot + options: + server_ip: 192.168.1.3 + payload: "DELETE" - hostname: switch_1 type: switch - num_ports: 8 + num_ports: 2 - - hostname: switch_2 - type: switch - num_ports: 8 - - - hostname: domain_controller + - hostname: server type: server - ip_address: 192.168.1.10 + ip_address: 192.168.1.3 subnet_mask: 255.255.255.0 default_gateway: 192.168.1.1 services: - - type: DNSServer 
- options: - domain_mapping: - arcd.com: 192.168.1.12 # web server - - - hostname: web_server - type: server - ip_address: 192.168.1.12 - subnet_mask: 255.255.255.0 - default_gateway: 192.168.1.1 - dns_server: 192.168.1.10 - services: - - type: WebServer - applications: - - type: DatabaseClient - options: - db_server_ip: 192.168.1.14 - - - - hostname: database_server - type: server - ip_address: 192.168.1.14 - subnet_mask: 255.255.255.0 - default_gateway: 192.168.1.1 - dns_server: 192.168.1.10 - services: - - type: DatabaseService - options: - backup_server_ip: 192.168.1.16 - - type: FTPClient - - - hostname: backup_server - type: server - ip_address: 192.168.1.16 - subnet_mask: 255.255.255.0 - default_gateway: 192.168.1.1 - dns_server: 192.168.1.10 - services: - - type: FTPServer - - - hostname: security_suite - type: server - ip_address: 192.168.1.110 - subnet_mask: 255.255.255.0 - default_gateway: 192.168.1.1 - dns_server: 192.168.1.10 - network_interfaces: - 2: # unfortunately this number is currently meaningless, they're just added in order and take up the next available slot - ip_address: 192.168.10.110 - subnet_mask: 255.255.255.0 - - - hostname: client_1 - type: computer - ip_address: 192.168.10.21 - subnet_mask: 255.255.255.0 - default_gateway: 192.168.10.1 - dns_server: 192.168.1.10 - applications: - - type: DataManipulationBot - options: - port_scan_p_of_success: 0.8 - data_manipulation_p_of_success: 0.8 - payload: "DELETE" - server_ip: 192.168.1.14 - - type: WebBrowser - options: - target_url: http://arcd.com/users/ - - type: DatabaseClient - options: - db_server_ip: 192.168.1.14 - services: - - type: DNSClient - - - hostname: client_2 - type: computer - ip_address: 192.168.10.22 - subnet_mask: 255.255.255.0 - default_gateway: 192.168.10.1 - dns_server: 192.168.1.10 - applications: - - type: WebBrowser - options: - target_url: http://arcd.com/users/ - - type: DataManipulationBot - options: - port_scan_p_of_success: 0.8 - data_manipulation_p_of_success: 
0.8 - payload: "DELETE" - server_ip: 192.168.1.14 - - type: DatabaseClient - options: - db_server_ip: 192.168.1.14 - services: - - type: DNSClient + - type: DatabaseService links: - - endpoint_a_hostname: router_1 + - endpoint_a_hostname: client endpoint_a_port: 1 endpoint_b_hostname: switch_1 - endpoint_b_port: 8 - - endpoint_a_hostname: router_1 - endpoint_a_port: 2 - endpoint_b_hostname: switch_2 - endpoint_b_port: 8 - - endpoint_a_hostname: switch_1 + endpoint_b_port: 1 + + - endpoint_a_hostname: server endpoint_a_port: 1 - endpoint_b_hostname: domain_controller - endpoint_b_port: 1 - - endpoint_a_hostname: switch_1 - endpoint_a_port: 2 - endpoint_b_hostname: web_server - endpoint_b_port: 1 - - endpoint_a_hostname: switch_1 - endpoint_a_port: 3 - endpoint_b_hostname: database_server - endpoint_b_port: 1 - - endpoint_a_hostname: switch_1 - endpoint_a_port: 4 - endpoint_b_hostname: backup_server - endpoint_b_port: 1 - - endpoint_a_hostname: switch_1 - endpoint_a_port: 7 - endpoint_b_hostname: security_suite - endpoint_b_port: 1 - - endpoint_a_hostname: switch_2 - endpoint_a_port: 1 - endpoint_b_hostname: client_1 - endpoint_b_port: 1 - - endpoint_a_hostname: switch_2 - endpoint_a_port: 2 - endpoint_b_hostname: client_2 - endpoint_b_port: 1 - - endpoint_a_hostname: switch_2 - endpoint_a_port: 7 - endpoint_b_hostname: security_suite + endpoint_b_hostname: switch_1 endpoint_b_port: 2 diff --git a/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml b/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml index 2d26eb31..07ee4e50 100644 --- a/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml +++ b/src/primaite/config/_package_data/scenario_with_placeholders/schedule.yaml @@ -1,18 +1,14 @@ base_scenario: scenario.yaml schedule: 0: - - greens_1.yaml - - reds_1.yaml + - greens_0.yaml + - reds_0.yaml 1: - - greens_1.yaml - - reds_2.yaml + - greens_0.yaml + - reds_1.yaml 2: - - greens_2.yaml + - 
greens_1.yaml - reds_1.yaml 3: - greens_2.yaml - reds_2.yaml - -# touch base with container to see what they've implemented for training schedule and evaluation schedule - for naming convention consistency -# when you exceed the number of episodes defined in the yaml, raise a warning and loop back to the beginning -# provide minimal functionality for checking compatibility- but we will assume that the user will correctly specify the blue/red/green agents and environment. diff --git a/src/primaite/notebooks/Scenario-Placeholders.ipynb b/src/primaite/notebooks/Scenario-Placeholders.ipynb deleted file mode 100644 index 9de34a81..00000000 --- a/src/primaite/notebooks/Scenario-Placeholders.ipynb +++ /dev/null @@ -1,190 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import yaml\n", - "from pprint import pprint\n", - "from pathlib import Path\n", - "from typing import Sequence\n", - "from primaite.session.environment import PrimaiteGymEnv\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "froot = Path('/home/cade/repos/PrimAITE/src/primaite/config/_package_data/scenario_with_placeholders/')\n", - "sch = froot / 'schedule.yaml'\n", - "fp = froot / 'scenario.yaml'\n", - "fpr1 = froot / 'reds_1.yaml'\n", - "fpr2 = froot / 'reds_2.yaml'\n", - "fpg1 = froot / 'greens_1.yaml'\n", - "fpg2 = froot / 'greens_2.yaml'" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "\n", - "\n", - "with open(sch,'r') as f:\n", - " schedule = yaml.safe_load(f)\n", - "\n", - "base_scenario_path = froot / schedule['base_scenario']\n", - "episodes = [v for n,v in schedule['schedule'].items()]\n", - "all_episode_paths = {x for ep in episodes for x in ep.values()}\n", - "episode_data = {fp:open(froot / fp, 'r').read() for fp in all_episode_paths}\n", - "base_scenario = 
open(base_scenario_path).read()\n", - "\n", - "def get_ep_config(ep_num):\n", - " episode = episodes[ep_num]\n", - " # print(episode.values())\n", - " parsed_cfg = yaml.safe_load('\\n'.join([episode_data[v] for v in episode.values()] + [base_scenario]))\n", - " flat_agents_list = []\n", - " for a in parsed_cfg['agents']:\n", - " if isinstance(a,Sequence):\n", - " flat_agents_list.extend(a)\n", - " else:\n", - " flat_agents_list.append(a)\n", - " parsed_cfg['agents'] = flat_agents_list\n", - " return parsed_cfg\n", - "\n", - "\n", - "pprint(len(get_ep_config(0)['agents']))\n", - "# pprint(get_ep_config(0)['agents'])\n", - "\n", - "# conf_data = open('test_data_1.yaml','r').read()\n", - "# variables = open('variables.yaml','r').read()\n", - "\n", - "# yaml.safe_load(f\"{variables}\\n{conf_data}\")\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gym = PrimaiteGymEnv(game_config=get_ep_config(0))\n", - "print(list(gym.game.agents.keys()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gym = PrimaiteGymEnv(game_config=get_ep_config(1))\n", - "print(list(gym.game.agents.keys()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gym = PrimaiteGymEnv(game_config=get_ep_config(2))\n", - "print(list(gym.game.agents.keys()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "gym = PrimaiteGymEnv(game_config=get_ep_config(3))\n", - "print(list(gym.game.agents.keys()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from primaite.session.environment import PrimaiteGymEnv" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "env_2 = 
PrimaiteGymEnv(game_config='/home/cade/repos/PrimAITE/src/primaite/config/_package_data/scenario_with_placeholders')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "for i in range(10):\n", - " print(env_2.episode_counter)\n", - " print(list(env_2.game.agents.keys()))\n", - " env_2.reset()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "env = PrimaiteGymEnv(game_config='/home/cade/repos/PrimAITE/src/primaite/config/_package_data/data_manipulation.yaml')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "sum([[1,2],[3,4]])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/src/primaite/notebooks/Using-Episode-Schedules.ipynb b/src/primaite/notebooks/Using-Episode-Schedules.ipynb new file mode 100644 index 00000000..80e67065 --- /dev/null +++ b/src/primaite/notebooks/Using-Episode-Schedules.ipynb @@ -0,0 +1,372 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using Episode Schedules\n", + "\n", + "PrimAITE supports the ability to use different variations on a scenario at different episodes. This can be used to increase \n", + "domain randomisation to prevent overfitting, or to set up curriculum learning to train agents to perform more complicated tasks.\n", + "\n", + "When using a fixed scenario, a single yaml config file is used. 
However, to use episode schedules, PrimAITE uses a \n", + "directory with several config files that work together." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Defining variations in the config file.\n", + "\n", + "### Base scenario\n", + "The base scenario is essentially the same as a fixed yaml configuration, but it can contain placeholders that are \n", + "populated with different things at runtime each episode. The base scenario contains any network, agent, or settings that\n", + "remain fixed for the entire training/evaluation session.\n", + "\n", + "The placeholders are defined as YAML Aliases and they are denoted by an asterisk (`*placeholder`).\n", + "\n", + "### Variations\n", + "For each variation that could be used in a placeholder, there is a separate yaml file that contains the data that should populate the placeholder.\n", + "\n", + "The data that fills the placeholder is defined as a YAML Anchor in a separate file, denoted by an ampersand (`&anchor`).\n", + "\n", + "[Learn more about YAML Aliases and Anchors here.](https://www.educative.io/blog/advanced-yaml-syntax-cheatsheet#:~:text=YAML%20Anchors%20and%20Alias)\n", + "\n", + "### Schedule\n", + "Users must define which combination of scenario variations should be loaded in each episode. 
This takes the form of a\n", + "YAML file with a relative path to the base scenario and a list of paths to be loaded in during each episode.\n", + "\n", + "It takes the following format:\n", + "```yaml\n", + "base_scenario: base.yaml\n", + "schedule:\n", + " 0: # list of variations to load in at episode 0 (before the first call to env.reset() happens)\n", + " - laydown_1.yaml\n", + " - attack_1.yaml\n", + " 1: # list of variations to load in at episode 1 (after the first env.reset() call)\n", + " - laydown_2.yaml\n", + " - attack_2.yaml\n", + "```\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Demonstration" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Run `primaite setup` to copy the example config files into the correct directory. Then, import and define config location." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!primaite setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "from primaite.session.environment import PrimaiteGymEnv\n", + "from primaite import PRIMAITE_PATHS\n", + "from prettytable import PrettyTable\n", + "scenario_path = PRIMAITE_PATHS.user_config_path / \"example_config/scenario_with_placeholders\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Base Scenario File\n", + "Let's view the contents of the base scenario file:\n", + "\n", + "It contains all the base settings that stay fixed throughout all episodes, including the `io_settings`, `game` settings, the network layout and the blue agent definition. There are two placeholders: `*greens` and `*reds`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"scenario.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Schedule File\n", + "Let's view the contents of the schedule file:\n", + "\n", + "This file references the base scenario file and defines which variations should be loaded in at each episode. In this instance, there are four episodes, during the first episode `greens_0` and `reds_0` is used, during the second episode `greens_0` and `reds_1` is used, and so on." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"schedule.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Green Agent Variation Files\n", + "\n", + "There are three different variants of the green agent setup. 
In `greens_0`, there are no green agents, in `greens_1` there is a green agent that executes the database client application 80% of the time, and in `greens_2` there is a green agent that executes the database client application 5% of the time.\n", + "\n", + "(the difference between `greens_1` and `greens_2` is in the agent name and action probabilities)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"greens_0.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"greens_1.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"greens_2.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Red Agent Variation Files\n", + "\n", + "There are three different variants of the red agent setup. In `reds_0`, there are no red agents, in `reds_1` there is a red agent that executes every 20 steps, but in `reds_2` there is a red agent that executes every 2 steps." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"reds_0.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"reds_1.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with open(scenario_path/\"reds_2.yaml\") as f:\n", + " print(f.read())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Running the simulation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create the environment using the variable config." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env = PrimaiteGymEnv(game_config=scenario_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Episode 0\n", + "Let's run the episodes to verify that the agents are changing as expected. In episode 0, there should be no green or red agents, just the defender blue agent."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(f\"Current episode number: {env.episode_counter}\")\n", + "print(f\"Agents present: {list(env.game.agents.keys())}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Episode 1\n", + "When we reset the environment, it moves onto episode 1, where it will bring in reds_1 for red agent definition.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env.reset()\n", + "print(f\"Current episode number: {env.episode_counter}\")\n", + "print(f\"Agents present: {list(env.game.agents.keys())}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Episode 2\n", + "When we reset the environment again, it moves onto episode 2, where it will bring in greens_1 and reds_1 for green and red agent definitions. Let's verify the agent names and that they take actions at the defined frequency.\n", + "\n", + "Most green actions will be `NODE_APPLICATION_EXECUTE` while red will `DONOTHING` except at steps 10 and 20." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env.reset()\n", + "print(f\"Current episode number: {env.episode_counter}\")\n", + "print(f\"Agents present: {list(env.game.agents.keys())}\")\n", + "for i in range(21):\n", + " env.step(0)\n", + "\n", + "table = PrettyTable()\n", + "table.field_names = [\"step\", \"Green Action\", \"Red Action\"]\n", + "for i in range(21):\n", + " green_action = env.game.agents['green_A'].action_history[i].action\n", + " red_action = env.game.agents['red_A'].action_history[i].action\n", + " table.add_row([i, green_action, red_action])\n", + "print(table)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Episode 3\n", + "When we reset the environment again, it moves onto episode 3, where it will bring in greens_2 and reds_2 for green and red agent definitions. Let's verify the agent names and that they take actions at the defined frequency.\n", + "\n", + "Now, green will perform `NODE_APPLICATION_EXECUTE` only 5% of the time, while red will perform `NODE_APPLICATION_EXECUTE` more frequently than before." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env.reset()\n", + "print(f\"Current episode number: {env.episode_counter}\")\n", + "print(f\"Agents present: {list(env.game.agents.keys())}\")\n", + "for i in range(21):\n", + " env.step(0)\n", + "\n", + "table = PrettyTable()\n", + "table.field_names = [\"step\", \"Green Action\", \"Red Action\"]\n", + "for i in range(21):\n", + " green_action = env.game.agents['green_B'].action_history[i].action\n", + " red_action = env.game.agents['red_B'].action_history[i].action\n", + " table.add_row([i, green_action, red_action])\n", + "print(table)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Further Episodes\n", + "\n", + "Since the schedule definition only goes up to episode 3, if we reset the environment again, we run out of episodes. The environment will simply loop back to the beginning, but it produces a warning message to make users aware that the episodes are being repeated." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "env.reset(); # semicolon suppresses jupyter outputting the observation space.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/primaite/session/episode_schedule.py b/src/primaite/session/episode_schedule.py index 2245e2b5..c726dcff 100644 --- a/src/primaite/session/episode_schedule.py +++ b/src/primaite/session/episode_schedule.py @@ -1,18 +1,15 @@ import copy from abc import ABC, abstractmethod -from os import PathLike +from itertools import chain from pathlib import Path from typing import Dict, List, Mapping, Sequence, Union import pydantic +import yaml from primaite import getLogger _LOGGER = getLogger(__name__) -import warnings -from itertools import chain - -import yaml class EpisodeScheduler(pydantic.BaseModel, ABC): @@ -28,9 +25,7 @@ class EpisodeScheduler(pydantic.BaseModel, ABC): class ConstantEpisodeScheduler(EpisodeScheduler): - """ - The constant episode schedule simply provides the same game setup every time. - """ + """The constant episode schedule simply provides the same game setup every time.""" config: Dict @@ -40,7 +35,7 @@ class ConstantEpisodeScheduler(EpisodeScheduler): class EpisodeListScheduler(EpisodeScheduler): - """The episode list u""" + """Cycle through a list of different game setups for each episode.""" schedule: Mapping[int, List[str]] """Mapping from episode number to list of filenames""" @@ -56,9 +51,9 @@ class EpisodeListScheduler(EpisodeScheduler): When this happens, we loop back to the beginning, but a warning is raised. 
""" - # TODO: be careful about off-by-one errors with episode number- should it start at 0 or 1? def __call__(self, episode_num: int) -> Dict: - if episode_num > len(self.schedule): + """Return the config for the given episode number.""" + if episode_num >= len(self.schedule): if not self._exceeded_episode_list: self._exceeded_episode_list = True _LOGGER.warn( From 42ce264e7352d09c8ce7449718028ab17bc1a813 Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Thu, 25 Apr 2024 13:54:05 +0100 Subject: [PATCH 4/8] #2476 Fix string formatting --- src/primaite/session/episode_schedule.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/primaite/session/episode_schedule.py b/src/primaite/session/episode_schedule.py index c726dcff..69ae5778 100644 --- a/src/primaite/session/episode_schedule.py +++ b/src/primaite/session/episode_schedule.py @@ -57,7 +57,7 @@ class EpisodeListScheduler(EpisodeScheduler): if not self._exceeded_episode_list: self._exceeded_episode_list = True _LOGGER.warn( - f"Running episode {episode_num} but the schedule only defines" + f"Running episode {episode_num} but the schedule only defines " f"{len(self.schedule)} episodes. 
Looping back to the beginning" ) # not sure if we should be using a traditional warning, or a _LOGGER.warning From 66f31e8ed1111fbfe71c2105d82d3ae22422a80d Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Thu, 25 Apr 2024 15:09:46 +0100 Subject: [PATCH 5/8] #2476 Add test for episode scheduler --- CHANGELOG.md | 2 + .../notebooks/Using-Episode-Schedules.ipynb | 2 +- src/primaite/session/environment.py | 30 ++-- src/primaite/session/episode_schedule.py | 1 + .../scenario_with_placeholders/greens_0.yaml | 2 + .../scenario_with_placeholders/greens_1.yaml | 34 ++++ .../scenario_with_placeholders/greens_2.yaml | 34 ++++ .../scenario_with_placeholders/reds_0.yaml | 2 + .../scenario_with_placeholders/reds_1.yaml | 26 +++ .../scenario_with_placeholders/reds_2.yaml | 26 +++ .../scenario_with_placeholders/scenario.yaml | 168 ++++++++++++++++++ .../scenario_with_placeholders/schedule.yaml | 14 ++ .../environments/test_sb3_environment.py | 2 +- .../e2e_integration_tests/test_environment.py | 6 +- .../test_uc2_data_manipulation_scenario.py | 2 +- .../test_episode_scheduler.py | 68 +++++++ .../test_io_settings.py | 2 +- .../game_layer/test_actions.py | 4 +- .../game_layer/test_rewards.py | 2 +- .../unit_tests/_primaite/_session/__init__.py | 0 .../_session/test_episode_schedule.py | 52 ++++++ 21 files changed, 456 insertions(+), 23 deletions(-) create mode 100644 tests/assets/configs/scenario_with_placeholders/greens_0.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/greens_1.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/greens_2.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/reds_0.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/reds_1.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/reds_2.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/scenario.yaml create mode 100644 tests/assets/configs/scenario_with_placeholders/schedule.yaml 
create mode 100644 tests/integration_tests/configuration_file_parsing/test_episode_scheduler.py create mode 100644 tests/unit_tests/_primaite/_session/__init__.py create mode 100644 tests/unit_tests/_primaite/_session/test_episode_schedule.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 81fe5621..4147d6f1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Upgraded pydantic to version 2.7.0 - Upgraded Ray to version >= 2.9 - Added ipywidgets to the dependencies +- Added ability to define scenarios that change depending on the episode number. +- Standardised Environment API by renaming the config parameter of `PrimaiteGymEnv` from `game_config` to `env_config` ## [Unreleased] - Made requests fail to reach their target if the node is off diff --git a/src/primaite/notebooks/Using-Episode-Schedules.ipynb b/src/primaite/notebooks/Using-Episode-Schedules.ipynb index 80e67065..c616a410 100644 --- a/src/primaite/notebooks/Using-Episode-Schedules.ipynb +++ b/src/primaite/notebooks/Using-Episode-Schedules.ipynb @@ -227,7 +227,7 @@ "metadata": {}, "outputs": [], "source": [ - "env = PrimaiteGymEnv(game_config=scenario_path)" + "env = PrimaiteGymEnv(env_config=scenario_path)" ] }, { diff --git a/src/primaite/session/environment.py b/src/primaite/session/environment.py index dea6b1dc..abbf051b 100644 --- a/src/primaite/session/environment.py +++ b/src/primaite/session/environment.py @@ -1,4 +1,3 @@ -import copy import json from os import PathLike from typing import Any, Dict, Optional, SupportsFloat, Tuple, Union @@ -25,10 +24,10 @@ class PrimaiteGymEnv(gymnasium.Env): assumptions about the agent list always having a list of length 1. 
""" - def __init__(self, game_config: Union[Dict, str, PathLike]): + def __init__(self, env_config: Union[Dict, str, PathLike]): """Initialise the environment.""" super().__init__() - self.episode_scheduler: EpisodeScheduler = build_scheduler(game_config) + self.episode_scheduler: EpisodeScheduler = build_scheduler(env_config) """Object that returns a config corresponding to the current episode.""" self.io = PrimaiteIO.from_config(self.episode_scheduler(0).get("io_settings", {})) """Handles IO for the environment. This produces sys logs, agent logs, etc.""" @@ -140,8 +139,8 @@ class PrimaiteRayEnv(gymnasium.Env): :param env_config: A dictionary containing the environment configuration. :type env_config: Dict """ - self.env = PrimaiteGymEnv(game_config=env_config) - self.env.episode_counter -= 1 + self.env = PrimaiteGymEnv(env_config=env_config) + # self.env.episode_counter -= 1 self.action_space = self.env.action_space self.observation_space = self.env.observation_space @@ -157,6 +156,11 @@ class PrimaiteRayEnv(gymnasium.Env): """Close the simulation.""" self.env.close() + @property + def game(self) -> PrimaiteGame: + """Pass through game from env.""" + return self.env.game + class PrimaiteRayMARLEnv(MultiAgentEnv): """Ray Environment that inherits from MultiAgentEnv to allow training MARL systems.""" @@ -168,16 +172,16 @@ class PrimaiteRayMARLEnv(MultiAgentEnv): which is the PrimaiteGame instance. :type env_config: Dict """ - self.game_config: Dict = env_config - """PrimaiteGame definition. This can be changed between episodes to enable curriculum learning.""" - self.io = PrimaiteIO.from_config(env_config.get("io_settings")) + self.episode_counter: int = 0 + """Current episode number.""" + self.episode_scheduler: EpisodeScheduler = build_scheduler(env_config) + """Object that returns a config corresponding to the current episode.""" + self.io = PrimaiteIO.from_config(self.episode_scheduler(0).get("io_settings", {})) """Handles IO for the environment. 
This produces sys logs, agent logs, etc.""" - self.game: PrimaiteGame = PrimaiteGame.from_config(copy.deepcopy(self.game_config)) + self.game: PrimaiteGame = PrimaiteGame.from_config(self.episode_scheduler(self.episode_counter)) """Reference to the primaite game""" self._agent_ids = list(self.game.rl_agents.keys()) """Agent ids. This is a list of strings of agent names.""" - self.episode_counter: int = 0 - """Current episode number.""" self.terminateds = set() self.truncateds = set() @@ -203,9 +207,9 @@ class PrimaiteRayMARLEnv(MultiAgentEnv): if self.io.settings.save_agent_actions: all_agent_actions = {name: agent.action_history for name, agent in self.game.agents.items()} self.io.write_agent_actions(agent_actions=all_agent_actions, episode=self.episode_counter) - self.game: PrimaiteGame = PrimaiteGame.from_config(cfg=copy.deepcopy(self.game_config)) - self.game.setup_for_episode(episode=self.episode_counter) self.episode_counter += 1 + self.game: PrimaiteGame = PrimaiteGame.from_config(self.episode_scheduler(self.episode_counter)) + self.game.setup_for_episode(episode=self.episode_counter) state = self.game.get_sim_state() self.game.update_agents(state) next_obs = self._get_obs() diff --git a/src/primaite/session/episode_schedule.py b/src/primaite/session/episode_schedule.py index 69ae5778..fa010d27 100644 --- a/src/primaite/session/episode_schedule.py +++ b/src/primaite/session/episode_schedule.py @@ -22,6 +22,7 @@ class EpisodeScheduler(pydantic.BaseModel, ABC): @abstractmethod def __call__(self, episode_num: int) -> Dict: """Return the config that should be used during this episode.""" + ... 
class ConstantEpisodeScheduler(EpisodeScheduler): diff --git a/tests/assets/configs/scenario_with_placeholders/greens_0.yaml b/tests/assets/configs/scenario_with_placeholders/greens_0.yaml new file mode 100644 index 00000000..f31c52fa --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/greens_0.yaml @@ -0,0 +1,2 @@ +# No green agents present +greens: &greens [] diff --git a/tests/assets/configs/scenario_with_placeholders/greens_1.yaml b/tests/assets/configs/scenario_with_placeholders/greens_1.yaml new file mode 100644 index 00000000..98d2392a --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/greens_1.yaml @@ -0,0 +1,34 @@ +agents: &greens + - ref: green_A + team: GREEN + type: ProbabilisticAgent + agent_settings: + action_probabilities: + 0: 0.2 + 1: 0.8 + observation_space: null + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client + applications: + - application_name: DatabaseClient + action_map: + 0: + action: DONOTHING + options: {} + 1: + action: NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 0 + + reward_function: + reward_components: + - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY + weight: 1.0 + options: + node_hostname: client diff --git a/tests/assets/configs/scenario_with_placeholders/greens_2.yaml b/tests/assets/configs/scenario_with_placeholders/greens_2.yaml new file mode 100644 index 00000000..17a5977b --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/greens_2.yaml @@ -0,0 +1,34 @@ +agents: &greens + - ref: green_B + team: GREEN + type: ProbabilisticAgent + agent_settings: + action_probabilities: + 0: 0.95 + 1: 0.05 + observation_space: null + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client + applications: + - application_name: DatabaseClient + action_map: + 0: + action: DONOTHING + options: {} + 1: + action: 
NODE_APPLICATION_EXECUTE + options: + node_id: 0 + application_id: 0 + + reward_function: + reward_components: + - type: GREEN_ADMIN_DATABASE_UNREACHABLE_PENALTY + weight: 1.0 + options: + node_hostname: client diff --git a/tests/assets/configs/scenario_with_placeholders/reds_0.yaml b/tests/assets/configs/scenario_with_placeholders/reds_0.yaml new file mode 100644 index 00000000..878aba97 --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/reds_0.yaml @@ -0,0 +1,2 @@ +# No red agents present +reds: &reds [] diff --git a/tests/assets/configs/scenario_with_placeholders/reds_1.yaml b/tests/assets/configs/scenario_with_placeholders/reds_1.yaml new file mode 100644 index 00000000..31675a0b --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/reds_1.yaml @@ -0,0 +1,26 @@ +reds: &reds + - ref: red_A + team: RED + type: RedDatabaseCorruptingAgent + + observation_space: null + + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client + applications: + - application_name: DataManipulationBot + + reward_function: + reward_components: + - type: DUMMY + + agent_settings: + start_settings: + start_step: 10 + frequency: 10 + variance: 0 diff --git a/tests/assets/configs/scenario_with_placeholders/reds_2.yaml b/tests/assets/configs/scenario_with_placeholders/reds_2.yaml new file mode 100644 index 00000000..c5572b89 --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/reds_2.yaml @@ -0,0 +1,26 @@ +reds: &reds + - ref: red_B + team: RED + type: RedDatabaseCorruptingAgent + + observation_space: null + + action_space: + action_list: + - type: DONOTHING + - type: NODE_APPLICATION_EXECUTE + options: + nodes: + - node_name: client + applications: + - application_name: DataManipulationBot + + reward_function: + reward_components: + - type: DUMMY + + agent_settings: + start_settings: + start_step: 3 + frequency: 2 + variance: 1 diff --git 
a/tests/assets/configs/scenario_with_placeholders/scenario.yaml b/tests/assets/configs/scenario_with_placeholders/scenario.yaml new file mode 100644 index 00000000..81848b2d --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/scenario.yaml @@ -0,0 +1,168 @@ +io_settings: + save_agent_actions: true + save_step_metadata: false + save_pcap_logs: false + save_sys_logs: false + + +game: + max_episode_length: 128 + ports: + - HTTP + - POSTGRES_SERVER + protocols: + - ICMP + - TCP + - UDP + thresholds: + nmne: + high: 10 + medium: 5 + low: 0 + +agents: + - *greens + - *reds + + - ref: defender + team: BLUE + type: ProxyAgent + observation_space: + type: CUSTOM + options: + components: + - type: NODES + label: NODES + options: + routers: [] + hosts: + - hostname: client + - hostname: server + num_services: 1 + num_applications: 1 + num_folders: 1 + num_files: 1 + num_nics: 1 + include_num_access: false + include_nmne: true + + - type: LINKS + label: LINKS + options: + link_references: + - client:eth-1<->switch_1:eth-1 + - server:eth-1<->switch_1:eth-2 + + action_space: + action_list: + - type: DONOTHING + - type: NODE_SHUTDOWN + - type: NODE_STARTUP + - type: HOST_NIC_ENABLE + - type: HOST_NIC_DISABLE + action_map: + 0: + action: DONOTHING + options: {} + 1: + action: NODE_SHUTDOWN + options: + node_id: 0 + 2: + action: NODE_SHUTDOWN + options: + node_id: 1 + 3: + action: NODE_STARTUP + options: + node_id: 0 + 4: + action: NODE_STARTUP + options: + node_id: 1 + 5: + action: HOST_NIC_DISABLE + options: + node_id: 0 + nic_id: 0 + 6: + action: HOST_NIC_DISABLE + options: + node_id: 1 + nic_id: 0 + 7: + action: HOST_NIC_ENABLE + options: + node_id: 0 + nic_id: 0 + 8: + action: HOST_NIC_ENABLE + options: + node_id: 1 + nic_id: 0 + options: + nodes: + - node_name: client + - node_name: server + + max_folders_per_node: 0 + max_files_per_folder: 0 + max_services_per_node: 0 + max_nics_per_node: 1 + max_acl_rules: 0 + ip_list: + - 192.168.1.2 + - 192.168.1.3 + + 
reward_function: + reward_components: + - type: DATABASE_FILE_INTEGRITY + weight: 0.40 + options: + node_hostname: database_server + folder_name: database + file_name: database.db + + agent_settings: + flatten_obs: false + + +simulation: + network: + nodes: + - hostname: client + type: computer + ip_address: 192.168.1.2 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + applications: + - type: DatabaseClient + options: + db_server_ip: 192.168.1.3 + - type: DataManipulationBot + options: + server_ip: 192.168.1.3 + payload: "DELETE" + + - hostname: switch_1 + type: switch + num_ports: 2 + + - hostname: server + type: server + ip_address: 192.168.1.3 + subnet_mask: 255.255.255.0 + default_gateway: 192.168.1.1 + services: + - type: DatabaseService + + links: + - endpoint_a_hostname: client + endpoint_a_port: 1 + endpoint_b_hostname: switch_1 + endpoint_b_port: 1 + + - endpoint_a_hostname: server + endpoint_a_port: 1 + endpoint_b_hostname: switch_1 + endpoint_b_port: 2 diff --git a/tests/assets/configs/scenario_with_placeholders/schedule.yaml b/tests/assets/configs/scenario_with_placeholders/schedule.yaml new file mode 100644 index 00000000..07ee4e50 --- /dev/null +++ b/tests/assets/configs/scenario_with_placeholders/schedule.yaml @@ -0,0 +1,14 @@ +base_scenario: scenario.yaml +schedule: + 0: + - greens_0.yaml + - reds_0.yaml + 1: + - greens_0.yaml + - reds_1.yaml + 2: + - greens_1.yaml + - reds_1.yaml + 3: + - greens_2.yaml + - reds_2.yaml diff --git a/tests/e2e_integration_tests/environments/test_sb3_environment.py b/tests/e2e_integration_tests/environments/test_sb3_environment.py index 83965191..f6ff595f 100644 --- a/tests/e2e_integration_tests/environments/test_sb3_environment.py +++ b/tests/e2e_integration_tests/environments/test_sb3_environment.py @@ -16,7 +16,7 @@ def test_sb3_compatibility(): with open(data_manipulation_config_path(), "r") as f: cfg = yaml.safe_load(f) - gym = PrimaiteGymEnv(game_config=cfg) + gym = PrimaiteGymEnv(env_config=cfg) 
model = PPO("MlpPolicy", gym) model.learn(total_timesteps=1000) diff --git a/tests/e2e_integration_tests/test_environment.py b/tests/e2e_integration_tests/test_environment.py index 673e1dc4..accfad50 100644 --- a/tests/e2e_integration_tests/test_environment.py +++ b/tests/e2e_integration_tests/test_environment.py @@ -21,7 +21,7 @@ class TestPrimaiteEnvironment: """Check that environment loads correctly from config and it can be reset.""" with open(CFG_PATH, "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) def env_checks(): assert env is not None @@ -44,7 +44,7 @@ class TestPrimaiteEnvironment: """Make sure you can go all the way through the session without errors.""" with open(CFG_PATH, "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) assert (num_actions := len(env.agent.action_manager.action_map)) == 54 # run every action and make sure there's no crash @@ -88,4 +88,4 @@ class TestPrimaiteEnvironment: with open(MISCONFIGURED_PATH, "r") as f: cfg = yaml.safe_load(f) with pytest.raises(pydantic.ValidationError): - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) diff --git a/tests/e2e_integration_tests/test_uc2_data_manipulation_scenario.py b/tests/e2e_integration_tests/test_uc2_data_manipulation_scenario.py index 0b31a353..db79e504 100644 --- a/tests/e2e_integration_tests/test_uc2_data_manipulation_scenario.py +++ b/tests/e2e_integration_tests/test_uc2_data_manipulation_scenario.py @@ -44,7 +44,7 @@ def test_application_install_uninstall_on_uc2(): with open(TEST_ASSETS_ROOT / "configs/test_application_install.yaml", "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) env.agent.flatten_obs = False env.reset() diff --git a/tests/integration_tests/configuration_file_parsing/test_episode_scheduler.py 
b/tests/integration_tests/configuration_file_parsing/test_episode_scheduler.py new file mode 100644 index 00000000..6b40fb1a --- /dev/null +++ b/tests/integration_tests/configuration_file_parsing/test_episode_scheduler.py @@ -0,0 +1,68 @@ +import pytest +import yaml + +from primaite.session.environment import PrimaiteGymEnv, PrimaiteRayEnv, PrimaiteRayMARLEnv +from tests.conftest import TEST_ASSETS_ROOT + +folder_path = TEST_ASSETS_ROOT / "configs" / "scenario_with_placeholders" +single_yaml_config = TEST_ASSETS_ROOT / "configs" / "test_primaite_session.yaml" +with open(single_yaml_config, "r") as f: + config_dict = yaml.safe_load(f) + + +@pytest.mark.parametrize("env_type", [PrimaiteGymEnv, PrimaiteRayEnv, PrimaiteRayMARLEnv]) +def test_creating_env_with_folder(env_type): + """Check that the environment can be created with a folder path.""" + + def check_taking_steps(e): + if isinstance(e, PrimaiteRayMARLEnv): + for i in range(9): + e.step({k: i for k in e.game.rl_agents}) + else: + for i in range(9): + e.step(i) + + env = env_type(env_config=folder_path) + assert env is not None + for _ in range(3): # do it multiple times to ensure it loops back to the beginning + assert len(env.game.agents) == 1 + assert "defender" in env.game.agents + check_taking_steps(env) + + env.reset() + assert len(env.game.agents) == 2 + assert "defender" in env.game.agents + assert "red_A" in env.game.agents + check_taking_steps(env) + + env.reset() + assert len(env.game.agents) == 3 + assert all([a in env.game.agents for a in ["defender", "green_A", "red_A"]]) + check_taking_steps(env) + + env.reset() + assert len(env.game.agents) == 3 + assert all([a in env.game.agents for a in ["defender", "green_B", "red_B"]]) + check_taking_steps(env) + + env.reset() + + +@pytest.mark.parametrize( + "env_data, env_type", + [ + (single_yaml_config, PrimaiteGymEnv), + (single_yaml_config, PrimaiteRayEnv), + (single_yaml_config, PrimaiteRayMARLEnv), + (config_dict, PrimaiteGymEnv), + (config_dict, 
PrimaiteRayEnv), + (config_dict, PrimaiteRayMARLEnv), + ], +) +def test_creating_env_with_static_config(env_data, env_type): + """Check that the environment can be created with a single yaml file.""" + env = env_type(env_config=single_yaml_config) + assert env is not None + agents_before = len(env.game.agents) + env.reset() + assert len(env.game.agents) == agents_before diff --git a/tests/integration_tests/configuration_file_parsing/test_io_settings.py b/tests/integration_tests/configuration_file_parsing/test_io_settings.py index e66350cf..21f56e97 100644 --- a/tests/integration_tests/configuration_file_parsing/test_io_settings.py +++ b/tests/integration_tests/configuration_file_parsing/test_io_settings.py @@ -24,7 +24,7 @@ def test_io_settings(): """Test that the io_settings are loaded correctly.""" with open(BASIC_CONFIG, "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) assert env.io.settings is not None diff --git a/tests/integration_tests/game_layer/test_actions.py b/tests/integration_tests/game_layer/test_actions.py index 855bc38d..edaf5d8d 100644 --- a/tests/integration_tests/game_layer/test_actions.py +++ b/tests/integration_tests/game_layer/test_actions.py @@ -507,7 +507,7 @@ def test_firewall_acl_add_remove_rule_integration(): with open(FIREWALL_ACTIONS_NETWORK, "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) # 1: Check that traffic is normal and acl starts off with 4 rules. 
firewall = env.game.simulation.network.get_node_by_hostname("firewall") @@ -598,7 +598,7 @@ def test_firewall_port_disable_enable_integration(): with open(FIREWALL_ACTIONS_NETWORK, "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) firewall = env.game.simulation.network.get_node_by_hostname("firewall") assert firewall.dmz_port.enabled == True diff --git a/tests/integration_tests/game_layer/test_rewards.py b/tests/integration_tests/game_layer/test_rewards.py index cfd013bc..7c38057e 100644 --- a/tests/integration_tests/game_layer/test_rewards.py +++ b/tests/integration_tests/game_layer/test_rewards.py @@ -103,7 +103,7 @@ def test_shared_reward(): with open(CFG_PATH, "r") as f: cfg = yaml.safe_load(f) - env = PrimaiteGymEnv(game_config=cfg) + env = PrimaiteGymEnv(env_config=cfg) env.reset() diff --git a/tests/unit_tests/_primaite/_session/__init__.py b/tests/unit_tests/_primaite/_session/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit_tests/_primaite/_session/test_episode_schedule.py b/tests/unit_tests/_primaite/_session/test_episode_schedule.py new file mode 100644 index 00000000..5d28f24e --- /dev/null +++ b/tests/unit_tests/_primaite/_session/test_episode_schedule.py @@ -0,0 +1,52 @@ +# FILEPATH: /home/cade/repos/PrimAITE/tests/unit_tests/_primaite/_session/test_episode_schedule.py + +import pytest +import yaml + +from primaite.session.episode_schedule import ConstantEpisodeScheduler, EpisodeListScheduler + + +def test_episode_list_scheduler(): + # Initialize an instance of EpisodeListScheduler + + # Define a schedule and episode data for testing + schedule = {0: ["episode1"], 1: ["episode2"]} + episode_data = {"episode1": "data1: 1", "episode2": "data2: 2"} + base_scenario = """agents: []""" + + scheduler = EpisodeListScheduler(schedule=schedule, episode_data=episode_data, base_scenario=base_scenario) + # Test when episode number is within the schedule + result = 
scheduler(0) + assert isinstance(result, dict) + assert yaml.safe_load("data1: 1\nagents: []") == result + + # Test next episode + result = scheduler(1) + assert isinstance(result, dict) + assert yaml.safe_load("data2: 2\nagents: []") == result + + # Test when episode number exceeds the schedule + result = scheduler(2) + assert isinstance(result, dict) + assert yaml.safe_load("data1: 1\nagents: []") == result + assert scheduler._exceeded_episode_list + + # Test when episode number is a sequence + scheduler.schedule = {0: ["episode1", "episode2"]} + result = scheduler(0) + assert isinstance(result, dict) + assert yaml.safe_load("data1: 1\ndata2: 2\nagents: []") == result + + +def test_constant_episode_scheduler(): + # Initialize an instance of ConstantEpisodeScheduler + config = {"key": "value"} + scheduler = ConstantEpisodeScheduler(config=config) + + result = scheduler(0) + assert isinstance(result, dict) + assert {"key": "value"} == result + + result = scheduler(1) + assert isinstance(result, dict) + assert {"key": "value"} == result From 736408f8b4b0ac070be6e49544ed5feb2ccccb16 Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Thu, 25 Apr 2024 15:12:46 +0100 Subject: [PATCH 6/8] Reference the episode schedule notebook in docs --- docs/source/config.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/source/config.rst b/docs/source/config.rst index b334d99b..57948ae2 100644 --- a/docs/source/config.rst +++ b/docs/source/config.rst @@ -5,7 +5,7 @@ PrimAITE |VERSION| Configuration ******************************** -PrimAITE uses a single configuration file to define everything needed to create the training environment for RL agents, including the network, the scripted agents, and the RL agent's action space, observation space, and reward function. 
+PrimAITE uses YAML configuration files to define everything needed to create the training environment for RL agents, including the network, the scripted agents, and the RL agent's action space, observation space, and reward function. Example Configuration Hierarchy ############################### @@ -34,3 +34,8 @@ Configurable items configuration/game.rst configuration/agents.rst configuration/simulation.rst + +Varying The Configuration Each Episode +###################################### + +PrimAITE allows for the configuration to be varied each episode. This is done by specifying a configuration folder instead of a single file. A full explanation is provided in the notebook `Using-Episode-Schedules.ipynb`. Please find the notebook in the user notebooks directory. From 37945c00656536a3f8a49ccda9306843fb7734e1 Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Thu, 25 Apr 2024 15:16:20 +0100 Subject: [PATCH 7/8] #2476 Remove temporary testing file --- src/primaite/notebooks/variables.yaml | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 src/primaite/notebooks/variables.yaml diff --git a/src/primaite/notebooks/variables.yaml b/src/primaite/notebooks/variables.yaml deleted file mode 100644 index cb4637e0..00000000 --- a/src/primaite/notebooks/variables.yaml +++ /dev/null @@ -1,7 +0,0 @@ -placeholder_1: &placeholder_1 - - a - - b - -placeholder_2: &placeholder_2 - - c - - d From 30cc04411db4270f06c5ae4beb591e90fb738b3b Mon Sep 17 00:00:00 2001 From: Marek Wolan Date: Fri, 26 Apr 2024 16:23:02 +0100 Subject: [PATCH 8/8] #2476 apply pr comments --- src/primaite/notebooks/Using-Episode-Schedules.ipynb | 4 ++-- tests/unit_tests/_primaite/_session/test_episode_schedule.py | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/primaite/notebooks/Using-Episode-Schedules.ipynb b/src/primaite/notebooks/Using-Episode-Schedules.ipynb index c616a410..34d6d9b5 100644 --- a/src/primaite/notebooks/Using-Episode-Schedules.ipynb +++ 
b/src/primaite/notebooks/Using-Episode-Schedules.ipynb @@ -20,8 +20,8 @@ "## Defining variations in the config file.\n", "\n", "### Base scenario\n", - "The base scenario is essentially the same as a fixed yaml configuration, but it can contain placeholders that are \n", - "populated with different things at runtime each episode. The base scenario contains any network, agent, or settings that\n", + "The base scenario is essentially the same as a fixed YAML configuration, but it can contain placeholders that are \n", + "populated with episode-specific data at runtime. The base scenario contains any network, agent, or settings that\n", "remain fixed for the entire training/evaluation session.\n", "\n", "The placeholders are defined as YAML Aliases and they are denoted by an asterisk (`*placeholder`).\n", diff --git a/tests/unit_tests/_primaite/_session/test_episode_schedule.py b/tests/unit_tests/_primaite/_session/test_episode_schedule.py index 5d28f24e..25a68cbb 100644 --- a/tests/unit_tests/_primaite/_session/test_episode_schedule.py +++ b/tests/unit_tests/_primaite/_session/test_episode_schedule.py @@ -1,5 +1,3 @@ -# FILEPATH: /home/cade/repos/PrimAITE/tests/unit_tests/_primaite/_session/test_episode_schedule.py - import pytest import yaml