This commit is contained in:
parent 824b9f9c70
commit 61470a51bb
121
12bitfloat_rust/isspam.rs
Normal file
@ -0,0 +1,121 @@
#![feature(let_chains)]

use std::{env, fs};

fn clean_content(content: &str) -> String {
    let alloed_ichars = "01234567891abcdefghijklmnopqrstuvwxyz \n.,!?";

    let clean_content = content.chars()
        .filter(|&c| alloed_ichars.contains(c))
        .collect::<String>();

    clean_content
}

fn get_sentences(content: &str) -> Vec<&str> {
    let mut sentences = content.split('.')
        .map(|s| s.trim_start()) // Remove leading whitespace
        .collect::<Vec<_>>();

    // Remove last "sentence" if it didn't end with a dot
    if let Some(last) = sentences.last() && !last.ends_with('.') {
        sentences.pop();
    }

    sentences
}

fn get_words(sentences: &str) -> impl Iterator<Item = &str> + Clone {
    sentences.split_whitespace()
}

fn is_fully_capitalized_word(word: &str) -> bool {
    word.chars()
        .all(|c| !c.is_ascii_alphanumeric() || c.is_ascii_uppercase())
}

fn get_capitalized_words(content: &str) -> Vec<&str> {
    let sentences = get_sentences(content);
    let mut cap_words = vec![];

    for sentence in sentences {
        // Always skip the first word since sentences start with a capital letter
        for word in get_words(sentence).skip(1) {
            if is_fully_capitalized_word(word) {
                cap_words.push(word);
            }
        }
    }

    cap_words
}

fn get_numbers(content: &str) -> Vec<String> {
    let clean = clean_content(content);

    clean.split(|c: char| c.is_ascii_digit())
        .map(|n| n.to_string())
        .collect()
}

fn get_forbidden_words(content: &str) -> Vec<&str> {
    fn check_forbidden(w: &str) -> bool {
        FORBIDDEN_WORDS.iter()
            .find(|fw| str::eq_ignore_ascii_case(w, fw))
            .is_some()
    }

    get_words(content)
        .filter(|w| check_forbidden(w))
        .collect()
}

fn analyze(data: &str) {
    let clean_data = clean_content(data);
    drop(clean_data); // You aren't actually using clean_data :O

    // All capitalized words
    let cap_words = get_capitalized_words(data);
    println!("All capitalized words: {}", cap_words.len());

    // All sentences
    let sentences = get_sentences(data);
    println!("Sentences: {}", sentences.len());

    // All words
    let words = get_words(data);
    println!("Words: {}", words.clone().count());

    // Numbers
    let numbers = get_numbers(data);
    println!("Numbers: {}", numbers.len());

    // Forbidden words
    let fw = get_forbidden_words(data);
    println!("Forbidden words: {}", fw.len());

    let word_count_per_sentence = words.count() / sentences.len();
    println!("Word count per sentence: {}", word_count_per_sentence);
}

fn main() {
    // Read in files from args
    for arg in env::args().skip(1) { // skip program arg
        let Ok(text) = fs::read_to_string(&arg) else {
            eprintln!("{arg} isn't a valid file or couldn't be read");
            continue;
        };

        analyze(&text);
    }

    // analyze(&SPAM1);
}

static FORBIDDEN_WORDS: &'static [&'static str] = &[
    "recovery", "techie", "http", "https", "digital", "hack", "::", "//", "com",
    "@", "crypto", "bitcoin", "wallet", "hacker", "welcome", "whatsapp", "email", "cryptocurrency",
    "stolen", "freeze", "quick", "crucial", "tracing", "scammers", "expers", "hire", "century",
    "transaction", "essential", "managing", "contact", "contacting", "understanding", "assets", "funds"
];
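A note on get_numbers above: str::split with a char predicate returns the text between matches, so splitting on is_ascii_digit yields the non-digit chunks (plus empty strings between consecutive digits) rather than the digits themselves. A standalone sketch, not part of the commit, that shows the behaviour of the committed functions on a tiny input:

// Standalone sketch, not part of the commit: what the committed get_numbers
// actually returns for a small input.
fn clean_content(content: &str) -> String {
    let alloed_ichars = "01234567891abcdefghijklmnopqrstuvwxyz \n.,!?";
    content.chars().filter(|&c| alloed_ichars.contains(c)).collect()
}

fn get_numbers(content: &str) -> Vec<String> {
    let clean = clean_content(content);
    clean.split(|c: char| c.is_ascii_digit())
        .map(|n| n.to_string())
        .collect()
}

fn main() {
    // Prints ["call ", "", "", " now"] rather than ["911"]:
    // each digit is a split point, not part of the output.
    println!("{:?}", get_numbers("call 911 now"));
}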
140
12bitfloat_rust/isspam_v1.rs
Normal file
@ -0,0 +1,140 @@
#![feature(let_chains)]


fn clean_content(content: &str) -> String {
    let alloed_ichars = "01234567891abcdefghijklmnopqrstuvwxyz \n.,!?";

    let clean_content = content.chars()
        .filter(|&c| alloed_ichars.contains(c))
        .collect::<String>();

    clean_content
}

fn get_sentences(content: &str) -> Vec<&str> {
    let mut sentences = content.split('.')
        .map(|s| s.trim_start()) // Remove leading whitespace
        .collect::<Vec<_>>();

    // Remove last "sentence" if it didn't end with a dot
    if let Some(last) = sentences.last() && !last.ends_with('.') {
        sentences.pop();
    }

    sentences
}

fn get_words(sentences: &str) -> impl Iterator<Item = &str> + Clone {
    sentences.split_whitespace()
}

fn is_fully_capitalized_word(word: &str) -> bool {
    word.chars()
        .all(|c| !c.is_ascii_alphanumeric() || c.is_ascii_uppercase())
}

fn get_capitalized_words(content: &str) -> Vec<&str> {
    let sentences = get_sentences(content);
    let mut cap_words = vec![];

    for sentence in sentences {
        // Always skip the first word since sentences start with a capital letter
        for word in get_words(sentence).skip(1) {
            if is_fully_capitalized_word(word) {
                cap_words.push(word);
            }
        }
    }

    cap_words
}

fn get_numbers(content: &str) -> Vec<String> {
    let clean = clean_content(content);

    clean.split(|c: char| c.is_ascii_digit())
        .map(|n| n.to_string())
        .collect()
}

fn get_forbidden_words(content: &str) -> Vec<&str> {
    fn check_forbidden(w: &str) -> bool {
        FORBIDDEN_WORDS.iter()
            .find(|fw| str::eq_ignore_ascii_case(w, fw))
            .is_some()
    }

    get_words(content)
        .filter(|w| check_forbidden(w))
        .collect()
}

fn analyze(data: &str) {
    let clean_data = clean_content(data);
    drop(clean_data); // You aren't actually using clean_data :O

    // All capitalized words
    let cap_words = get_capitalized_words(data);
    println!("All capitalized words: {}", cap_words.len());

    // All sentences
    let sentences = get_sentences(data);
    println!("Sentences: {}", sentences.len());

    // All words
    let words = get_words(data);
    println!("Words: {}", words.clone().count());

    // Numbers
    let numbers = get_numbers(data);
    println!("Numbers: {}", numbers.len());

    // Forbidden words
    let fw = get_forbidden_words(data);
    println!("Forbidden words: {}", fw.len());

    let word_count_per_sentence = words.count() / sentences.len();
    println!("Word count per sentence: {}", word_count_per_sentence);
}

fn main() {
    // // Read in files from args
    // for arg in env::args() {
    //     let Ok(text) = fs::read_to_string(arg) else {
    //         eprintln!("{arg} isn't a valid file or couldn't be read");
    //         continue;
    //     };
    //
    //     analyze(&text);
    // }

    analyze(&SPAM1);
}

static FORBIDDEN_WORDS: &'static [&'static str] = &[
    "recovery", "techie", "http", "https", "digital", "hack", "::", "//", "com",
    "@", "crypto", "bitcoin", "wallet", "hacker", "welcome", "whatsapp", "email", "cryptocurrency",
    "stolen", "freeze", "quick", "crucial", "tracing", "scammers", "expers", "hire", "century",
    "transaction", "essential", "managing", "contact", "contacting", "understanding", "assets", "funds"
];

static SPAM1: &'static str = "HIRE Century Web Recovery TO RECOVER YOUR LOST BITCOIN
If you’ve lost your Bitcoin to an online scam, hiring a professional recovery service can significantly improve your chances of getting your funds back. Century Web Recovery specializes in Bitcoin recovery, helping victims reclaim their stolen assets. Here’s what you need to know:

Understanding the Recovery Process
The recovery process begins with contacting Century Web Recovery. Their team will guide you through the steps necessary to initiate an investigation into your case. Understanding the process is key to managing your expectations.

Documenting Your Case
To facilitate recovery, it’s essential to document all relevant information regarding the scam. This includes transaction records, wallet addresses, and any communications with the scammer. Century Web Recovery will help you gather this information to build a strong case.

Investigation and Tracking
Once you hire Century Web Recovery, their experts will begin investigating your case. They use sophisticated tools to track the stolen Bitcoin, identifying the paths taken by the scammers. This tracing is crucial for successful recovery.

Freezing Stolen Assets
Quick action is vital in recovering stolen Bitcoin.Century Web Recovery works directly with cryptocurrency exchanges to freeze any stolen assets, preventing the scammers from cashing out your funds. This collaboration is essential for a successful recovery.

Legal Support and Guidance
If necessary, Century Web Recovery can provide legal support. They will guide you on reporting the scam to law enforcement and assist in filing any legal claims. Their expertise in crypto-related cases ensures you receive the best advice on how to proceed.

If you’ve lost Bitcoin to an online scam, don’t hesitate. Hire Century Web Recovery to recover your lost assets and regain your financial security.";
7
12bitfloat_rust/risspam/Cargo.lock
generated
Normal file
@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "risspam"
version = "0.1.0"
6
12bitfloat_rust/risspam/Cargo.toml
Normal file
@ -0,0 +1,6 @@
[package]
name = "risspam"
version = "0.1.0"
edition = "2021"

[dependencies]
121
12bitfloat_rust/risspam/src/main.rs
Normal file
@ -0,0 +1,121 @@
#![feature(let_chains)]

use std::{env, fs};

fn clean_content(content: &str) -> String {
    let alloed_ichars = "01234567891abcdefghijklmnopqrstuvwxyz \n.,!?";

    let clean_content = content.chars()
        .filter(|&c| alloed_ichars.contains(c))
        .collect::<String>();

    clean_content
}

fn get_sentences(content: &str) -> Vec<&str> {
    let mut sentences = content.split('.')
        .map(|s| s.trim_start()) // Remove leading whitespace
        .collect::<Vec<_>>();

    // Remove last "sentence" if it didn't end with a dot
    if let Some(last) = sentences.last() && !last.ends_with('.') {
        sentences.pop();
    }

    sentences
}

fn get_words(sentences: &str) -> impl Iterator<Item = &str> + Clone {
    sentences.split_whitespace()
}

fn is_fully_capitalized_word(word: &str) -> bool {
    word.chars()
        .all(|c| !c.is_ascii_alphanumeric() || c.is_ascii_uppercase())
}

fn get_capitalized_words(content: &str) -> Vec<&str> {
    let sentences = get_sentences(content);
    let mut cap_words = vec![];

    for sentence in sentences {
        // Always skip the first word since sentences start with a capital letter
        for word in get_words(sentence).skip(1) {
            if is_fully_capitalized_word(word) {
                cap_words.push(word);
            }
        }
    }

    cap_words
}

fn get_numbers(content: &str) -> Vec<String> {
    let clean = clean_content(content);

    clean.split(|c: char| c.is_ascii_digit())
        .map(|n| n.to_string())
        .collect()
}

fn get_forbidden_words(content: &str) -> Vec<&str> {
    fn check_forbidden(w: &str) -> bool {
        FORBIDDEN_WORDS.iter()
            .find(|fw| str::eq_ignore_ascii_case(w, fw))
            .is_some()
    }

    get_words(content)
        .filter(|w| check_forbidden(w))
        .collect()
}

fn analyze(data: &str) {
    let clean_data = clean_content(data);
    drop(clean_data); // You aren't actually using clean_data :O

    // All capitalized words
    let cap_words = get_capitalized_words(data);
    println!("All capitalized words: {}", cap_words.len());

    // All sentences
    let sentences = get_sentences(data);
    println!("Sentences: {}", sentences.len());

    // All words
    let words = get_words(data);
    println!("Words: {}", words.clone().count());

    // Numbers
    let numbers = get_numbers(data);
    println!("Numbers: {}", numbers.len());

    // Forbidden words
    let fw = get_forbidden_words(data);
    println!("Forbidden words: {}", fw.len());

    let word_count_per_sentence = words.count() / sentences.len();
    println!("Word count per sentence: {}", word_count_per_sentence);
}

fn main() {
    // Read in files from args
    for arg in env::args().skip(1) { // skip program arg
        let Ok(text) = fs::read_to_string(&arg) else {
            eprintln!("{arg} isn't a valid file or couldn't be read");
            continue;
        };

        analyze(&text);
    }

    // analyze(&SPAM1);
}

static FORBIDDEN_WORDS: &'static [&'static str] = &[
    "recovery", "techie", "http", "https", "digital", "hack", "::", "//", "com",
    "@", "crypto", "bitcoin", "wallet", "hacker", "welcome", "whatsapp", "email", "cryptocurrency",
    "stolen", "freeze", "quick", "crucial", "tracing", "scammers", "expers", "hire", "century",
    "transaction", "essential", "managing", "contact", "contacting", "understanding", "assets", "funds"
];
1
12bitfloat_rust/risspam/target/.rustc_info.json
Normal file
@ -0,0 +1 @@
{"rustc_fingerprint":10200614701319076238,"outputs":{"4614504638168534921":{"success":true,"status":"","code":0,"stdout":"rustc 1.85.0-nightly (d10a6823f 2024-11-29)\nbinary: rustc\ncommit-hash: d10a6823f4c3176be7a05a2454e5c33b861cb05f\ncommit-date: 2024-11-29\nhost: x86_64-unknown-linux-gnu\nrelease: 1.85.0-nightly\nLLVM version: 19.1.4\n","stderr":""},"15729799797837862367":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.so\nlib___.so\nlib___.a\nlib___.so\n/home/retoor/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu\noff\npacked\nunpacked\n___\ndebug_assertions\nfmt_debug=\"full\"\noverflow_checks\npanic=\"unwind\"\nproc_macro\nrelocation_model=\"pic\"\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"gnu\"\ntarget_family=\"unix\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_has_atomic\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_has_atomic_equal_alignment=\"16\"\ntarget_has_atomic_equal_alignment=\"32\"\ntarget_has_atomic_equal_alignment=\"64\"\ntarget_has_atomic_equal_alignment=\"8\"\ntarget_has_atomic_equal_alignment=\"ptr\"\ntarget_has_atomic_load_store\ntarget_has_atomic_load_store=\"16\"\ntarget_has_atomic_load_store=\"32\"\ntarget_has_atomic_load_store=\"64\"\ntarget_has_atomic_load_store=\"8\"\ntarget_has_atomic_load_store=\"ptr\"\ntarget_os=\"linux\"\ntarget_pointer_width=\"64\"\ntarget_thread_local\ntarget_vendor=\"unknown\"\nub_checks\nunix\n","stderr":""}},"successes":{}}
3
12bitfloat_rust/risspam/target/CACHEDIR.TAG
Normal file
@ -0,0 +1,3 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/
0
12bitfloat_rust/risspam/target/release/.cargo-lock
Normal file
@ -0,0 +1 @@
bcbff30c781130f6
@ -0,0 +1 @@
{"rustc":7959095874983568062,"features":"[]","declared_features":"[]","target":10519780086885261595,"profile":18277820415669657429,"path":10602529704205407992,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"release/.fingerprint/risspam-588b7c16f8fda172/dep-bin-risspam","checksum":false}}],"rustflags":[],"metadata":7797948686568424061,"config":2202906307356721367,"compile_kind":0}
Binary file not shown.
@ -0,0 +1 @@
This file has an mtime of when this was started.
@ -0,0 +1 @@
This file has an mtime of when this was started.
@ -0,0 +1,3 @@
{"$message_type":"diagnostic","message":"`#![feature]` may not be used on the stable release channel","code":{"code":"E0554","explanation":"Feature attributes are only allowed on the nightly release channel. Stable or\nbeta compilers will not comply.\n\nErroneous code example:\n\n```ignore (depends on release channel)\n#![feature(lang_items)] // error: `#![feature]` may not be used on the\n // stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n"},"level":"error","spans":[{"file_name":"src/main.rs","byte_start":0,"byte_end":23,"line_start":1,"line_end":1,"column_start":1,"column_end":24,"is_primary":true,"text":[{"text":"#![feature(let_chains)]","highlight_start":1,"highlight_end":24}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[],"rendered":"\u001b[0m\u001b[1m\u001b[38;5;9merror[E0554]\u001b[0m\u001b[0m\u001b[1m: `#![feature]` may not be used on the stable release channel\u001b[0m\n\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m--> \u001b[0m\u001b[0msrc/main.rs:1:1\u001b[0m\n\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m\n\u001b[0m\u001b[1m\u001b[38;5;12m1\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m\u001b[0m \u001b[0m\u001b[0m#![feature(let_chains)]\u001b[0m\n\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;9m^^^^^^^^^^^^^^^^^^^^^^^\u001b[0m\n\n"}
{"$message_type":"diagnostic","message":"aborting due to 1 previous error","code":null,"level":"error","spans":[],"children":[],"rendered":"\u001b[0m\u001b[1m\u001b[38;5;9merror\u001b[0m\u001b[0m\u001b[1m: aborting due to 1 previous error\u001b[0m\n\n"}
{"$message_type":"diagnostic","message":"For more information about this error, try `rustc --explain E0554`.","code":null,"level":"failure-note","spans":[],"children":[],"rendered":"\u001b[0m\u001b[1mFor more information about this error, try `rustc --explain E0554`.\u001b[0m\n"}
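The E0554 diagnostics above come from building this crate on the stable release channel, which rejects the #![feature(let_chains)] attribute at the top of src/main.rs (the committed .rustc_info.json, by contrast, records a nightly toolchain). A minimal standalone sketch, not part of the commit, of the trailing-fragment check from get_sentences written without a let chain so it also compiles on stable:

// Standalone sketch, not part of the commit: stable-Rust equivalent of
//   if let Some(last) = sentences.last() && !last.ends_with('.') { sentences.pop(); }
fn main() {
    let mut sentences = vec!["First sentence.", "trailing fragment"];

    // Option::is_some_and (stable since Rust 1.70) folds the pattern match
    // and the extra condition into one boolean check.
    if sentences.last().is_some_and(|last| !last.ends_with('.')) {
        sentences.pop();
    }

    assert_eq!(sentences, vec!["First sentence."]);
}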
BIN
12bitfloat_rust/risspam/target/release/deps/risspam-588b7c16f8fda172
Executable file
Binary file not shown.
@ -0,0 +1,5 @@
/home/retoor/projects/spam/rust/risspam/target/release/deps/risspam-588b7c16f8fda172: src/main.rs

/home/retoor/projects/spam/rust/risspam/target/release/deps/risspam-588b7c16f8fda172.d: src/main.rs

src/main.rs:
@ -0,0 +1,5 @@
/home/retoor/projects/spam/rust/risspam/target/release/deps/risspam-edd96cae17e7d87c: src/main.rs

/home/retoor/projects/spam/rust/risspam/target/release/deps/risspam-edd96cae17e7d87c.d: src/main.rs

src/main.rs:
BIN
12bitfloat_rust/risspam/target/release/risspam
Executable file
Binary file not shown.
1
12bitfloat_rust/risspam/target/release/risspam.d
Normal file
@ -0,0 +1 @@
/home/retoor/projects/spam/rust/risspam/target/release/risspam: /home/retoor/projects/spam/rust/risspam/src/main.rs