Skip to content

Commit fd565d9

Browse files
hyperpolymathclaude
and committed
chore: standardise Justfile (rename, fix parse errors, remove useless commands), fix Rust lint/fmt issues
Batch Justfile audit: standardised naming (lowercase→Justfile), fixed parse errors, removed useless build-riscv from non-Rust repos, added missing assail recipe, and fixed code quality issues. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent bf5cf14 commit fd565d9

31 files changed

Lines changed: 484 additions & 416 deletions
File renamed without changes.

src/analyzer/complexity.rs

Lines changed: 30 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -67,8 +67,8 @@ fn check_logic_patterns(content: &str) -> bool {
6767
// Common conditional patterns across formats
6868
let patterns = [
6969
"if ", "else ", "then ", " ? ", " : ", // Ternary and conditionals
70-
"match ", "case ", "when ", // Pattern matching
71-
"&&", "||", " and ", " or ", // Logical operators
70+
"match ", "case ", "when ", // Pattern matching
71+
"&&", "||", " and ", " or ", // Logical operators
7272
];
7373

7474
patterns.iter().any(|p| content.contains(p))
@@ -91,28 +91,40 @@ fn check_function_patterns(content: &str) -> bool {
9191

9292
fn check_constraint_patterns(content: &str) -> bool {
9393
let patterns = [
94-
">=", "<=", ">", "<", // Comparison operators
95-
"& ", "| ", // CUE unification/disjunction
96-
"=~", // Regex matching
97-
"!~", // Negative regex
98-
"min:", "max:", // JSON Schema style
99-
"minLength", "maxLength",
94+
">=",
95+
"<=",
96+
">",
97+
"<", // Comparison operators
98+
"& ",
99+
"| ", // CUE unification/disjunction
100+
"=~", // Regex matching
101+
"!~", // Negative regex
102+
"min:",
103+
"max:", // JSON Schema style
104+
"minLength",
105+
"maxLength",
100106
"pattern:",
101-
"| *", // CUE default
102-
"_|_", // CUE bottom
107+
"| *", // CUE default
108+
"_|_", // CUE bottom
103109
];
104110

105111
patterns.iter().any(|p| content.contains(p))
106112
}
107113

108114
fn check_generation_patterns(content: &str) -> bool {
109115
let patterns = [
110-
"for ", "foreach ", // Loop patterns
111-
"map(", "filter(", "fold(", // Functional patterns
112-
"Array.from", "Array.map", // Array generation
113-
"std.range", "std.map", // Nickel stdlib
114-
"[for ", "{ for ", // CUE comprehensions
115-
"...", // Spread operators
116+
"for ",
117+
"foreach ", // Loop patterns
118+
"map(",
119+
"filter(",
120+
"fold(", // Functional patterns
121+
"Array.from",
122+
"Array.map", // Array generation
123+
"std.range",
124+
"std.map", // Nickel stdlib
125+
"[for ",
126+
"{ for ", // CUE comprehensions
127+
"...", // Spread operators
116128
];
117129

118130
patterns.iter().any(|p| content.contains(p))
@@ -147,7 +159,8 @@ fn check_repetition(content: &str) -> bool {
147159
}
148160

149161
// Look for repeated patterns (simplified)
150-
let mut pattern_counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
162+
let mut pattern_counts: std::collections::HashMap<&str, usize> =
163+
std::collections::HashMap::new();
151164

152165
for line in &lines {
153166
let trimmed = line.trim();

src/analyzer/mod.rs

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -41,12 +41,13 @@ impl ConfigAnalyzer {
4141
/// Analyze a configuration file
4242
pub async fn analyze(&self, path: &Path) -> Result<Analysis, ConflowError> {
4343
// Read file content
44-
let content = tokio::fs::read_to_string(path).await.map_err(|e| {
45-
ConflowError::FileReadError {
46-
path: path.to_path_buf(),
47-
error: e.to_string(),
48-
}
49-
})?;
44+
let content =
45+
tokio::fs::read_to_string(path)
46+
.await
47+
.map_err(|e| ConflowError::FileReadError {
48+
path: path.to_path_buf(),
49+
error: e.to_string(),
50+
})?;
5051

5152
// Detect format
5253
let format = config_detector::detect_format(&content, path)?;

src/analyzer/patterns.rs

Lines changed: 26 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -83,40 +83,51 @@ fn has_homogeneous_lists(content: &str) -> bool {
8383
}
8484

8585
fn is_environment_config(content: &str) -> bool {
86-
let env_keywords = ["dev", "staging", "prod", "production", "test", "development"];
87-
env_keywords.iter().any(|k| content.to_lowercase().contains(k))
86+
let env_keywords = [
87+
"dev",
88+
"staging",
89+
"prod",
90+
"production",
91+
"test",
92+
"development",
93+
];
94+
env_keywords
95+
.iter()
96+
.any(|k| content.to_lowercase().contains(k))
8897
}
8998

9099
fn is_schema_definition(content: &str) -> bool {
91100
// Look for type definition patterns
92101
let schema_patterns = [
93-
"#", // CUE definitions
94-
"$schema", // JSON Schema
95-
"type:", // Various
102+
"#", // CUE definitions
103+
"$schema", // JSON Schema
104+
"type:", // Various
96105
"properties:", // JSON Schema
97-
"required:", // JSON Schema
106+
"required:", // JSON Schema
98107
];
99108
schema_patterns.iter().any(|p| content.contains(p))
100109
}
101110

102111
fn is_templated(content: &str) -> bool {
103112
// Look for template variable patterns
104113
let template_patterns = [
105-
"{{", "}}", // Mustache/Handlebars
106-
"${", "}", // Shell/Terraform style
107-
"<%", "%>", // ERB style
108-
"[[", "]]", // Alternative brackets
114+
"{{", "}}", // Mustache/Handlebars
115+
"${", "}", // Shell/Terraform style
116+
"<%", "%>", // ERB style
117+
"[[", "]]", // Alternative brackets
109118
];
110119

111-
template_patterns.chunks(2).any(|pair| {
112-
content.contains(pair[0]) && content.contains(pair[1])
113-
})
120+
template_patterns
121+
.chunks(2)
122+
.any(|pair| content.contains(pair[0]) && content.contains(pair[1]))
114123
}
115124

116125
fn is_resource_definition(content: &str) -> bool {
117126
let resource_patterns = [
118-
"apiVersion:", "kind:", // Kubernetes
119-
"resource \"", "provider \"", // Terraform
127+
"apiVersion:",
128+
"kind:", // Kubernetes
129+
"resource \"",
130+
"provider \"", // Terraform
120131
"AWSTemplateFormatVersion", // CloudFormation
121132
];
122133
resource_patterns.iter().any(|p| content.contains(p))

src/cache/filesystem.rs

Lines changed: 38 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,10 @@ impl FilesystemCache {
3232
})?;
3333
}
3434

35-
Ok(Self { cache_dir, base_dir })
35+
Ok(Self {
36+
cache_dir,
37+
base_dir,
38+
})
3639
}
3740

3841
/// Create cache with default directory
@@ -63,8 +66,8 @@ impl FilesystemCache {
6366
}
6467

6568
// Walk cache directory
66-
for prefix_dir in std::fs::read_dir(&self.cache_dir)
67-
.map_err(|e| ConflowError::CacheError {
69+
for prefix_dir in
70+
std::fs::read_dir(&self.cache_dir).map_err(|e| ConflowError::CacheError {
6871
message: format!("Failed to read cache directory: {}", e),
6972
})?
7073
{
@@ -78,8 +81,8 @@ impl FilesystemCache {
7881
continue;
7982
}
8083

81-
for entry_file in std::fs::read_dir(&prefix_dir)
82-
.map_err(|e| ConflowError::CacheError {
84+
for entry_file in
85+
std::fs::read_dir(&prefix_dir).map_err(|e| ConflowError::CacheError {
8386
message: format!("Failed to read cache subdirectory: {}", e),
8487
})?
8588
{
@@ -117,17 +120,17 @@ impl Cache for FilesystemCache {
117120
}
118121

119122
// Read cached entry
120-
let content = tokio::fs::read_to_string(&path).await.map_err(|e| {
121-
ConflowError::CacheError {
122-
message: format!("Failed to read cache entry: {}", e),
123-
}
124-
})?;
123+
let content =
124+
tokio::fs::read_to_string(&path)
125+
.await
126+
.map_err(|e| ConflowError::CacheError {
127+
message: format!("Failed to read cache entry: {}", e),
128+
})?;
125129

126-
let entry: CachedEntry = serde_json::from_str(&content).map_err(|e| {
127-
ConflowError::CacheError {
130+
let entry: CachedEntry =
131+
serde_json::from_str(&content).map_err(|e| ConflowError::CacheError {
128132
message: format!("Failed to parse cache entry: {}", e),
129-
}
130-
})?;
133+
})?;
131134

132135
// Verify outputs still exist
133136
for output in &entry.result.outputs {
@@ -152,11 +155,11 @@ impl Cache for FilesystemCache {
152155

153156
// Create parent directory
154157
if let Some(parent) = path.parent() {
155-
tokio::fs::create_dir_all(parent).await.map_err(|e| {
156-
ConflowError::CacheError {
158+
tokio::fs::create_dir_all(parent)
159+
.await
160+
.map_err(|e| ConflowError::CacheError {
157161
message: format!("Failed to create cache directory: {}", e),
158-
}
159-
})?;
162+
})?;
160163
}
161164

162165
let entry = CachedEntry {
@@ -170,9 +173,11 @@ impl Cache for FilesystemCache {
170173
message: format!("Failed to serialize cache entry: {}", e),
171174
})?;
172175

173-
tokio::fs::write(&path, json).await.map_err(|e| ConflowError::CacheError {
174-
message: format!("Failed to write cache entry: {}", e),
175-
})?;
176+
tokio::fs::write(&path, json)
177+
.await
178+
.map_err(|e| ConflowError::CacheError {
179+
message: format!("Failed to write cache entry: {}", e),
180+
})?;
176181

177182
Ok(())
178183
}
@@ -182,29 +187,29 @@ impl Cache for FilesystemCache {
182187
let path = self.cache_path(&key);
183188

184189
if path.exists() {
185-
tokio::fs::remove_file(&path).await.map_err(|e| {
186-
ConflowError::CacheError {
190+
tokio::fs::remove_file(&path)
191+
.await
192+
.map_err(|e| ConflowError::CacheError {
187193
message: format!("Failed to remove cache entry: {}", e),
188-
}
189-
})?;
194+
})?;
190195
}
191196

192197
Ok(())
193198
}
194199

195200
async fn clear(&self) -> Result<(), ConflowError> {
196201
if self.cache_dir.exists() {
197-
tokio::fs::remove_dir_all(&self.cache_dir).await.map_err(|e| {
198-
ConflowError::CacheError {
202+
tokio::fs::remove_dir_all(&self.cache_dir)
203+
.await
204+
.map_err(|e| ConflowError::CacheError {
199205
message: format!("Failed to clear cache: {}", e),
200-
}
201-
})?;
206+
})?;
202207

203-
tokio::fs::create_dir_all(&self.cache_dir).await.map_err(|e| {
204-
ConflowError::CacheError {
208+
tokio::fs::create_dir_all(&self.cache_dir)
209+
.await
210+
.map_err(|e| ConflowError::CacheError {
205211
message: format!("Failed to recreate cache directory: {}", e),
206-
}
207-
})?;
212+
})?;
208213
}
209214

210215
Ok(())

src/cache/hash.rs

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -30,23 +30,23 @@ impl ContentHasher {
3030
self.hasher.update(stage.name.as_bytes());
3131

3232
// Hash tool configuration
33-
let tool_json = serde_json::to_string(&stage.tool)
34-
.map_err(|e| ConflowError::CacheError {
33+
let tool_json =
34+
serde_json::to_string(&stage.tool).map_err(|e| ConflowError::CacheError {
3535
message: format!("Failed to serialize tool config: {}", e),
3636
})?;
3737
self.hasher.update(tool_json.as_bytes());
3838

3939
// Hash input specification
40-
let input_json = serde_json::to_string(&stage.input)
41-
.map_err(|e| ConflowError::CacheError {
40+
let input_json =
41+
serde_json::to_string(&stage.input).map_err(|e| ConflowError::CacheError {
4242
message: format!("Failed to serialize input config: {}", e),
4343
})?;
4444
self.hasher.update(input_json.as_bytes());
4545

4646
// Hash output specification
4747
if let Some(ref output) = stage.output {
48-
let output_json = serde_json::to_string(output)
49-
.map_err(|e| ConflowError::CacheError {
48+
let output_json =
49+
serde_json::to_string(output).map_err(|e| ConflowError::CacheError {
5050
message: format!("Failed to serialize output config: {}", e),
5151
})?;
5252
self.hasher.update(output_json.as_bytes());
@@ -103,8 +103,9 @@ impl ContentHasher {
103103
base_dir.join(pattern).to_string_lossy().to_string()
104104
};
105105

106-
let matches = glob::glob(&full_pattern)
107-
.map_err(|e| ConflowError::GlobPattern { message: e.to_string() })?;
106+
let matches = glob::glob(&full_pattern).map_err(|e| ConflowError::GlobPattern {
107+
message: e.to_string(),
108+
})?;
108109

109110
for entry in matches {
110111
if let Ok(path) = entry {

0 commit comments

Comments
 (0)