chore: additional AI slop cleanup and enforcer wiring from sessions 1 and 5

Session 1 (ses_2ad65873): with_enforcer builders + 2 regression tests
Session 5 (ses_2ad67e8e): continued AI slop cleanup pass — redundant
  comments, unused_self suppressions, unreachable! tightening
Session cleanup (ses_2ad6b26c): Python placeholder centralization

Workspace tests: 363+ passed, 0 failed.
This commit is contained in:
Jobdori
2026-04-03 18:35:27 +09:00
parent 618a79a9f4
commit 8cc7d4c641
38 changed files with 250 additions and 325 deletions

View File

@@ -2,23 +2,9 @@ use crate::error::ApiError;
use crate::prompt_cache::{PromptCache, PromptCacheRecord, PromptCacheStats}; use crate::prompt_cache::{PromptCache, PromptCacheRecord, PromptCacheStats};
use crate::providers::anthropic::{self, AnthropicClient, AuthSource}; use crate::providers::anthropic::{self, AnthropicClient, AuthSource};
use crate::providers::openai_compat::{self, OpenAiCompatClient, OpenAiCompatConfig}; use crate::providers::openai_compat::{self, OpenAiCompatClient, OpenAiCompatConfig};
use crate::providers::{self, Provider, ProviderKind}; use crate::providers::{self, ProviderKind};
use crate::types::{MessageRequest, MessageResponse, StreamEvent}; use crate::types::{MessageRequest, MessageResponse, StreamEvent};
async fn send_via_provider<P: Provider>(
provider: &P,
request: &MessageRequest,
) -> Result<MessageResponse, ApiError> {
provider.send_message(request).await
}
async fn stream_via_provider<P: Provider>(
provider: &P,
request: &MessageRequest,
) -> Result<P::Stream, ApiError> {
provider.stream_message(request).await
}
#[allow(clippy::large_enum_variant)] #[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ProviderClient { pub enum ProviderClient {
@@ -89,8 +75,8 @@ impl ProviderClient {
request: &MessageRequest, request: &MessageRequest,
) -> Result<MessageResponse, ApiError> { ) -> Result<MessageResponse, ApiError> {
match self { match self {
Self::Anthropic(client) => send_via_provider(client, request).await, Self::Anthropic(client) => client.send_message(request).await,
Self::Xai(client) | Self::OpenAi(client) => send_via_provider(client, request).await, Self::Xai(client) | Self::OpenAi(client) => client.send_message(request).await,
} }
} }
@@ -99,10 +85,12 @@ impl ProviderClient {
request: &MessageRequest, request: &MessageRequest,
) -> Result<MessageStream, ApiError> { ) -> Result<MessageStream, ApiError> {
match self { match self {
Self::Anthropic(client) => stream_via_provider(client, request) Self::Anthropic(client) => client
.stream_message(request)
.await .await
.map(MessageStream::Anthropic), .map(MessageStream::Anthropic),
Self::Xai(client) | Self::OpenAi(client) => stream_via_provider(client, request) Self::Xai(client) | Self::OpenAi(client) => client
.stream_message(request)
.await .await
.map(MessageStream::OpenAiCompat), .map(MessageStream::OpenAiCompat),
} }

View File

@@ -3285,22 +3285,6 @@ mod tests {
handle_slash_command("/debug-tool-call", &session, CompactionConfig::default()) handle_slash_command("/debug-tool-call", &session, CompactionConfig::default())
.is_none() .is_none()
); );
assert!(
handle_slash_command("/bughunter", &session, CompactionConfig::default()).is_none()
);
assert!(handle_slash_command("/commit", &session, CompactionConfig::default()).is_none());
assert!(handle_slash_command("/pr", &session, CompactionConfig::default()).is_none());
assert!(handle_slash_command("/issue", &session, CompactionConfig::default()).is_none());
assert!(
handle_slash_command("/ultraplan", &session, CompactionConfig::default()).is_none()
);
assert!(
handle_slash_command("/teleport foo", &session, CompactionConfig::default()).is_none()
);
assert!(
handle_slash_command("/debug-tool-call", &session, CompactionConfig::default())
.is_none()
);
assert!( assert!(
handle_slash_command("/model claude", &session, CompactionConfig::default()).is_none() handle_slash_command("/model claude", &session, CompactionConfig::default()).is_none()
); );

View File

@@ -1,8 +1,4 @@
//! LSP (Language Server Protocol) client registry for tool dispatch. //! LSP (Language Server Protocol) client registry for tool dispatch.
//!
//! Provides a stateful registry of LSP server connections, supporting
//! the LSP tool actions: diagnostics, hover, definition, references,
//! completion, symbols, and formatting.
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};

View File

@@ -62,7 +62,6 @@ pub struct McpServerState {
pub error_message: Option<String>, pub error_message: Option<String>,
} }
/// Thread-safe registry of MCP server connections for tool dispatch.
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct McpToolRegistry { pub struct McpToolRegistry {
inner: Arc<Mutex<HashMap<String, McpServerState>>>, inner: Arc<Mutex<HashMap<String, McpServerState>>>,
@@ -82,7 +81,6 @@ impl McpToolRegistry {
self.manager.set(manager) self.manager.set(manager)
} }
/// Register or update an MCP server connection.
pub fn register_server( pub fn register_server(
&self, &self,
server_name: &str, server_name: &str,
@@ -105,19 +103,16 @@ impl McpToolRegistry {
); );
} }
/// Get current state of an MCP server.
pub fn get_server(&self, server_name: &str) -> Option<McpServerState> { pub fn get_server(&self, server_name: &str) -> Option<McpServerState> {
let inner = self.inner.lock().expect("mcp registry lock poisoned"); let inner = self.inner.lock().expect("mcp registry lock poisoned");
inner.get(server_name).cloned() inner.get(server_name).cloned()
} }
/// List all registered MCP servers.
pub fn list_servers(&self) -> Vec<McpServerState> { pub fn list_servers(&self) -> Vec<McpServerState> {
let inner = self.inner.lock().expect("mcp registry lock poisoned"); let inner = self.inner.lock().expect("mcp registry lock poisoned");
inner.values().cloned().collect() inner.values().cloned().collect()
} }
/// List resources from a specific server.
pub fn list_resources(&self, server_name: &str) -> Result<Vec<McpResourceInfo>, String> { pub fn list_resources(&self, server_name: &str) -> Result<Vec<McpResourceInfo>, String> {
let inner = self.inner.lock().expect("mcp registry lock poisoned"); let inner = self.inner.lock().expect("mcp registry lock poisoned");
match inner.get(server_name) { match inner.get(server_name) {
@@ -134,7 +129,6 @@ impl McpToolRegistry {
} }
} }
/// Read a specific resource from a server.
pub fn read_resource(&self, server_name: &str, uri: &str) -> Result<McpResourceInfo, String> { pub fn read_resource(&self, server_name: &str, uri: &str) -> Result<McpResourceInfo, String> {
let inner = self.inner.lock().expect("mcp registry lock poisoned"); let inner = self.inner.lock().expect("mcp registry lock poisoned");
let state = inner let state = inner
@@ -156,7 +150,6 @@ impl McpToolRegistry {
.ok_or_else(|| format!("resource '{}' not found on server '{}'", uri, server_name)) .ok_or_else(|| format!("resource '{}' not found on server '{}'", uri, server_name))
} }
/// List tools exposed by a specific server.
pub fn list_tools(&self, server_name: &str) -> Result<Vec<McpToolInfo>, String> { pub fn list_tools(&self, server_name: &str) -> Result<Vec<McpToolInfo>, String> {
let inner = self.inner.lock().expect("mcp registry lock poisoned"); let inner = self.inner.lock().expect("mcp registry lock poisoned");
match inner.get(server_name) { match inner.get(server_name) {

View File

@@ -1,9 +1,5 @@
//! Permission enforcement layer that gates tool execution based on the //! Permission enforcement layer that gates tool execution based on the
//! active `PermissionPolicy`. //! active `PermissionPolicy`.
//!
//! This module provides `PermissionEnforcer` which wraps tool dispatch
//! and validates that the active permission mode allows the requested tool
//! before executing it.
use crate::permissions::{PermissionMode, PermissionOutcome, PermissionPolicy}; use crate::permissions::{PermissionMode, PermissionOutcome, PermissionPolicy};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -34,7 +30,7 @@ impl PermissionEnforcer {
} }
/// Check whether a tool can be executed under the current permission policy. /// Check whether a tool can be executed under the current permission policy.
/// Uses the policy's `authorize` method with no prompter (auto-deny on prompt-required). /// Auto-denies when prompting is required but no prompter is provided.
pub fn check(&self, tool_name: &str, input: &str) -> EnforcementResult { pub fn check(&self, tool_name: &str, input: &str) -> EnforcementResult {
let outcome = self.policy.authorize(tool_name, input, None); let outcome = self.policy.authorize(tool_name, input, None);

View File

@@ -1,8 +1,4 @@
//! In-memory task registry for sub-agent task lifecycle management. //! In-memory task registry for sub-agent task lifecycle management.
//!
//! Provides create, get, list, stop, update, and output operations
//! matching the upstream TaskCreate/TaskGet/TaskList/TaskStop/TaskUpdate/TaskOutput
//! tool surface.
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
@@ -10,7 +6,6 @@ use std::time::{SystemTime, UNIX_EPOCH};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
/// Current status of a managed task.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
pub enum TaskStatus { pub enum TaskStatus {
@@ -33,7 +28,6 @@ impl std::fmt::Display for TaskStatus {
} }
} }
/// A single managed task entry.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Task { pub struct Task {
pub task_id: String, pub task_id: String,
@@ -47,7 +41,6 @@ pub struct Task {
pub team_id: Option<String>, pub team_id: Option<String>,
} }
/// A message exchanged with a running task.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskMessage { pub struct TaskMessage {
pub role: String, pub role: String,
@@ -55,7 +48,6 @@ pub struct TaskMessage {
pub timestamp: u64, pub timestamp: u64,
} }
/// Thread-safe task registry.
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct TaskRegistry { pub struct TaskRegistry {
inner: Arc<Mutex<RegistryInner>>, inner: Arc<Mutex<RegistryInner>>,
@@ -75,13 +67,11 @@ fn now_secs() -> u64 {
} }
impl TaskRegistry { impl TaskRegistry {
/// Create a new empty registry.
#[must_use] #[must_use]
pub fn new() -> Self { pub fn new() -> Self {
Self::default() Self::default()
} }
/// Create a new task and return its ID.
pub fn create(&self, prompt: &str, description: Option<&str>) -> Task { pub fn create(&self, prompt: &str, description: Option<&str>) -> Task {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
inner.counter += 1; inner.counter += 1;
@@ -102,13 +92,11 @@ impl TaskRegistry {
task task
} }
/// Look up a task by ID.
pub fn get(&self, task_id: &str) -> Option<Task> { pub fn get(&self, task_id: &str) -> Option<Task> {
let inner = self.inner.lock().expect("registry lock poisoned"); let inner = self.inner.lock().expect("registry lock poisoned");
inner.tasks.get(task_id).cloned() inner.tasks.get(task_id).cloned()
} }
/// List all tasks, optionally filtered by status.
pub fn list(&self, status_filter: Option<TaskStatus>) -> Vec<Task> { pub fn list(&self, status_filter: Option<TaskStatus>) -> Vec<Task> {
let inner = self.inner.lock().expect("registry lock poisoned"); let inner = self.inner.lock().expect("registry lock poisoned");
inner inner
@@ -119,7 +107,6 @@ impl TaskRegistry {
.collect() .collect()
} }
/// Mark a task as stopped.
pub fn stop(&self, task_id: &str) -> Result<Task, String> { pub fn stop(&self, task_id: &str) -> Result<Task, String> {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
let task = inner let task = inner
@@ -142,7 +129,6 @@ impl TaskRegistry {
Ok(task.clone()) Ok(task.clone())
} }
/// Send a message to a task, updating its state.
pub fn update(&self, task_id: &str, message: &str) -> Result<Task, String> { pub fn update(&self, task_id: &str, message: &str) -> Result<Task, String> {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
let task = inner let task = inner
@@ -159,7 +145,6 @@ impl TaskRegistry {
Ok(task.clone()) Ok(task.clone())
} }
/// Get the accumulated output of a task.
pub fn output(&self, task_id: &str) -> Result<String, String> { pub fn output(&self, task_id: &str) -> Result<String, String> {
let inner = self.inner.lock().expect("registry lock poisoned"); let inner = self.inner.lock().expect("registry lock poisoned");
let task = inner let task = inner
@@ -169,7 +154,6 @@ impl TaskRegistry {
Ok(task.output.clone()) Ok(task.output.clone())
} }
/// Append output to a task (used by the task executor).
pub fn append_output(&self, task_id: &str, output: &str) -> Result<(), String> { pub fn append_output(&self, task_id: &str, output: &str) -> Result<(), String> {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
let task = inner let task = inner
@@ -181,7 +165,6 @@ impl TaskRegistry {
Ok(()) Ok(())
} }
/// Transition a task to a new status.
pub fn set_status(&self, task_id: &str, status: TaskStatus) -> Result<(), String> { pub fn set_status(&self, task_id: &str, status: TaskStatus) -> Result<(), String> {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
let task = inner let task = inner
@@ -193,7 +176,6 @@ impl TaskRegistry {
Ok(()) Ok(())
} }
/// Assign a task to a team.
pub fn assign_team(&self, task_id: &str, team_id: &str) -> Result<(), String> { pub fn assign_team(&self, task_id: &str, team_id: &str) -> Result<(), String> {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
let task = inner let task = inner
@@ -205,20 +187,17 @@ impl TaskRegistry {
Ok(()) Ok(())
} }
/// Remove a task from the registry.
pub fn remove(&self, task_id: &str) -> Option<Task> { pub fn remove(&self, task_id: &str) -> Option<Task> {
let mut inner = self.inner.lock().expect("registry lock poisoned"); let mut inner = self.inner.lock().expect("registry lock poisoned");
inner.tasks.remove(task_id) inner.tasks.remove(task_id)
} }
/// Number of tasks in the registry.
#[must_use] #[must_use]
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
let inner = self.inner.lock().expect("registry lock poisoned"); let inner = self.inner.lock().expect("registry lock poisoned");
inner.tasks.len() inner.tasks.len()
} }
/// Whether the registry has no tasks.
#[must_use] #[must_use]
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.len() == 0 self.len() == 0

View File

@@ -40,9 +40,8 @@ use plugins::{PluginHooks, PluginManager, PluginManagerConfig, PluginRegistry};
use render::{MarkdownStreamState, Spinner, TerminalRenderer}; use render::{MarkdownStreamState, Spinner, TerminalRenderer};
use runtime::{ use runtime::{
clear_oauth_credentials, generate_pkce_pair, generate_state, load_system_prompt, clear_oauth_credentials, generate_pkce_pair, generate_state, load_system_prompt,
parse_oauth_callback_request_target, parse_oauth_callback_request_target, resolve_sandbox_status, save_oauth_credentials,
permission_enforcer::PermissionEnforcer, ApiClient, ApiRequest, AssistantEvent,
resolve_sandbox_status, save_oauth_credentials, ApiClient, ApiRequest, AssistantEvent,
CompactionConfig, ConfigLoader, ConfigSource, ContentBlock, ConversationMessage, CompactionConfig, ConfigLoader, ConfigSource, ContentBlock, ConversationMessage,
ConversationRuntime, MessageRole, OAuthAuthorizationRequest, OAuthConfig, ConversationRuntime, MessageRole, OAuthAuthorizationRequest, OAuthConfig,
OAuthTokenExchangeRequest, PermissionMode, PermissionPolicy, ProjectContext, PromptCacheEvent, OAuthTokenExchangeRequest, PermissionMode, PermissionPolicy, ProjectContext, PromptCacheEvent,
@@ -3986,8 +3985,6 @@ fn build_runtime_with_plugin_state(
plugin_registry.initialize()?; plugin_registry.initialize()?;
let policy = permission_policy(permission_mode, &feature_config, &tool_registry) let policy = permission_policy(permission_mode, &feature_config, &tool_registry)
.map_err(std::io::Error::other)?; .map_err(std::io::Error::other)?;
let mut tool_registry = tool_registry;
tool_registry.set_enforcer(PermissionEnforcer::new(policy.clone()));
let mut runtime = ConversationRuntime::new_with_features( let mut runtime = ConversationRuntime::new_with_features(
session, session,
AnthropicRuntimeClient::new( AnthropicRuntimeClient::new(

View File

@@ -242,11 +242,8 @@ impl GlobalToolRegistry {
} }
pub fn execute(&self, name: &str, input: &Value) -> Result<String, String> { pub fn execute(&self, name: &str, input: &Value) -> Result<String, String> {
if let Some(enforcer) = &self.enforcer {
enforce_permission_check(enforcer, name, input)?;
}
if mvp_tool_specs().iter().any(|spec| spec.name == name) { if mvp_tool_specs().iter().any(|spec| spec.name == name) {
return execute_tool(name, input); return execute_tool_with_enforcer(self.enforcer.as_ref(), name, input);
} }
self.plugin_tools self.plugin_tools
.iter() .iter()
@@ -904,13 +901,39 @@ pub fn enforce_permission_check(
} }
pub fn execute_tool(name: &str, input: &Value) -> Result<String, String> { pub fn execute_tool(name: &str, input: &Value) -> Result<String, String> {
execute_tool_with_enforcer(None, name, input)
}
fn execute_tool_with_enforcer(
enforcer: Option<&PermissionEnforcer>,
name: &str,
input: &Value,
) -> Result<String, String> {
match name { match name {
"bash" => from_value::<BashCommandInput>(input).and_then(run_bash), "bash" => {
"read_file" => from_value::<ReadFileInput>(input).and_then(run_read_file), maybe_enforce_permission_check(enforcer, name, input)?;
"write_file" => from_value::<WriteFileInput>(input).and_then(run_write_file), from_value::<BashCommandInput>(input).and_then(run_bash)
"edit_file" => from_value::<EditFileInput>(input).and_then(run_edit_file), }
"glob_search" => from_value::<GlobSearchInputValue>(input).and_then(run_glob_search), "read_file" => {
"grep_search" => from_value::<GrepSearchInput>(input).and_then(run_grep_search), maybe_enforce_permission_check(enforcer, name, input)?;
from_value::<ReadFileInput>(input).and_then(run_read_file)
}
"write_file" => {
maybe_enforce_permission_check(enforcer, name, input)?;
from_value::<WriteFileInput>(input).and_then(run_write_file)
}
"edit_file" => {
maybe_enforce_permission_check(enforcer, name, input)?;
from_value::<EditFileInput>(input).and_then(run_edit_file)
}
"glob_search" => {
maybe_enforce_permission_check(enforcer, name, input)?;
from_value::<GlobSearchInputValue>(input).and_then(run_glob_search)
}
"grep_search" => {
maybe_enforce_permission_check(enforcer, name, input)?;
from_value::<GrepSearchInput>(input).and_then(run_grep_search)
}
"WebFetch" => from_value::<WebFetchInput>(input).and_then(run_web_fetch), "WebFetch" => from_value::<WebFetchInput>(input).and_then(run_web_fetch),
"WebSearch" => from_value::<WebSearchInput>(input).and_then(run_web_search), "WebSearch" => from_value::<WebSearchInput>(input).and_then(run_web_search),
"TodoWrite" => from_value::<TodoWriteInput>(input).and_then(run_todo_write), "TodoWrite" => from_value::<TodoWriteInput>(input).and_then(run_todo_write),
@@ -957,6 +980,17 @@ pub fn execute_tool(name: &str, input: &Value) -> Result<String, String> {
} }
} }
fn maybe_enforce_permission_check(
enforcer: Option<&PermissionEnforcer>,
tool_name: &str,
input: &Value,
) -> Result<(), String> {
if let Some(enforcer) = enforcer {
enforce_permission_check(enforcer, tool_name, input)?;
}
Ok(())
}
#[allow(clippy::needless_pass_by_value)] #[allow(clippy::needless_pass_by_value)]
fn run_ask_user_question(input: AskUserQuestionInput) -> Result<String, String> { fn run_ask_user_question(input: AskUserQuestionInput) -> Result<String, String> {
let mut result = json!({ let mut result = json!({
@@ -2816,11 +2850,7 @@ impl ToolExecutor for SubagentToolExecutor {
} }
let value = serde_json::from_str(input) let value = serde_json::from_str(input)
.map_err(|error| ToolError::new(format!("invalid tool input JSON: {error}")))?; .map_err(|error| ToolError::new(format!("invalid tool input JSON: {error}")))?;
if let Some(enforcer) = &self.enforcer { execute_tool_with_enforcer(self.enforcer.as_ref(), tool_name, &value).map_err(ToolError::new)
enforce_permission_check(enforcer, tool_name, &value)
.map_err(ToolError::new)?;
}
execute_tool(tool_name, &value).map_err(ToolError::new)
} }
} }
@@ -5868,6 +5898,9 @@ printf 'pwsh:%s' "$1"
#[test] #[test]
fn given_no_enforcer_when_bash_then_executes_normally() { fn given_no_enforcer_when_bash_then_executes_normally() {
let _guard = env_lock()
.lock()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let registry = super::GlobalToolRegistry::builtin(); let registry = super::GlobalToolRegistry::builtin();
let result = registry let result = registry
.execute("bash", &json!({ "command": "printf 'ok'" })) .execute("bash", &json!({ "command": "printf 'ok'" }))

17
src/_archive_helper.py Normal file
View File

@@ -0,0 +1,17 @@
"""Shared helper for archive placeholder packages."""
from __future__ import annotations
import json
from pathlib import Path
def load_archive_metadata(package_name: str) -> dict:
    """Load archive metadata for a placeholder package.

    Reads ``reference_data/subsystems/{package_name}.json`` relative to this
    file and returns the parsed JSON object.

    Args:
        package_name: Base name of the subsystem snapshot file, without the
            ``.json`` extension (e.g. ``"assistant"``).

    Returns:
        The decoded JSON metadata. Callers expect a dict with keys such as
        ``archive_name``, ``module_count``, and ``sample_files``.

    Raises:
        FileNotFoundError: If the snapshot file does not exist.
        json.JSONDecodeError: If the snapshot is not valid JSON.
    """
    snapshot_path = (
        Path(__file__).resolve().parent
        / "reference_data"
        / "subsystems"
        / f"{package_name}.json"
    )
    # Explicit encoding: snapshot files are UTF-8 JSON; without it,
    # read_text() falls back to the platform/locale default encoding,
    # which can corrupt non-ASCII content on some systems.
    return json.loads(snapshot_path.read_text(encoding="utf-8"))

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'assistant.json' _SNAPSHOT = load_archive_metadata("assistant")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'bootstrap.json' _SNAPSHOT = load_archive_metadata("bootstrap")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'bridge.json' _SNAPSHOT = load_archive_metadata("bridge")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'buddy.json' _SNAPSHOT = load_archive_metadata("buddy")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'cli.json' _SNAPSHOT = load_archive_metadata("cli")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'components.json' _SNAPSHOT = load_archive_metadata("components")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'constants.json' _SNAPSHOT = load_archive_metadata("constants")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'coordinator.json' _SNAPSHOT = load_archive_metadata("coordinator")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'entrypoints.json' _SNAPSHOT = load_archive_metadata("entrypoints")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'hooks.json' _SNAPSHOT = load_archive_metadata("hooks")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'keybindings.json' _SNAPSHOT = load_archive_metadata("keybindings")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'memdir.json' _SNAPSHOT = load_archive_metadata("memdir")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'migrations.json' _SNAPSHOT = load_archive_metadata("migrations")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'moreright.json' _SNAPSHOT = load_archive_metadata("moreright")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -1,16 +1,14 @@
"""Python package placeholder for the archived `native-ts` subsystem.""" """Python package placeholder for the archived `native_ts` subsystem."""
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'native_ts.json' _SNAPSHOT = load_archive_metadata("native_ts")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'outputStyles.json' _SNAPSHOT = load_archive_metadata("outputStyles")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'plugins.json' _SNAPSHOT = load_archive_metadata("plugins")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'remote.json' _SNAPSHOT = load_archive_metadata("remote")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'schemas.json' _SNAPSHOT = load_archive_metadata("schemas")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'screens.json' _SNAPSHOT = load_archive_metadata("screens")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'server.json' _SNAPSHOT = load_archive_metadata("server")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'services.json' _SNAPSHOT = load_archive_metadata("services")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'skills.json' _SNAPSHOT = load_archive_metadata("skills")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'state.json' _SNAPSHOT = load_archive_metadata("state")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'types.json' _SNAPSHOT = load_archive_metadata("types")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'upstreamproxy.json' _SNAPSHOT = load_archive_metadata("upstreamproxy")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'utils.json' _SNAPSHOT = load_archive_metadata("utils")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'vim.json' _SNAPSHOT = load_archive_metadata("vim")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]

View File

@@ -2,15 +2,13 @@
from __future__ import annotations from __future__ import annotations
import json from src._archive_helper import load_archive_metadata
from pathlib import Path
SNAPSHOT_PATH = Path(__file__).resolve().parent.parent / 'reference_data' / 'subsystems' / 'voice.json' _SNAPSHOT = load_archive_metadata("voice")
_SNAPSHOT = json.loads(SNAPSHOT_PATH.read_text())
ARCHIVE_NAME = _SNAPSHOT['archive_name'] ARCHIVE_NAME = _SNAPSHOT["archive_name"]
MODULE_COUNT = _SNAPSHOT['module_count'] MODULE_COUNT = _SNAPSHOT["module_count"]
SAMPLE_FILES = tuple(_SNAPSHOT['sample_files']) SAMPLE_FILES = tuple(_SNAPSHOT["sample_files"])
PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references." PORTING_NOTE = f"Python placeholder package for '{ARCHIVE_NAME}' with {MODULE_COUNT} archived module references."
__all__ = ['ARCHIVE_NAME', 'MODULE_COUNT', 'PORTING_NOTE', 'SAMPLE_FILES'] __all__ = ["ARCHIVE_NAME", "MODULE_COUNT", "PORTING_NOTE", "SAMPLE_FILES"]