Compare commits


4 Commits

Author SHA1 Message Date
Aravinth Manivannan 6f029d2945 fix: associate challenges with usernames 2023-06-17 16:13:48 +05:30
Aravinth Manivannan e7b01a5b06 feat: impl auth challenges interfaces for pg 2023-06-13 19:26:21 +05:30
Aravinth Manivannan 0cfffed52e feat: impl auth challenges interfaces for mariadb 2023-06-13 19:24:03 +05:30
Aravinth Manivannan c53fe2e3ff feat: define internfaces to create,fetch and rm auth challenges 2023-06-13 19:23:23 +05:30
54 changed files with 4153 additions and 24089 deletions

View File

@@ -63,7 +63,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: "18.0.0"
node-version: "14.x"
- name: Build frontend
run: make frontend

View File

@@ -52,7 +52,7 @@ jobs:
maria:
image: mariadb:10
image: mariadb
env:
MARIADB_USER: "maria"
MARIADB_PASSWORD: "password"
@@ -90,7 +90,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: "18.0.0"
node-version: "16.x"
- name: Install ${{ matrix.version }}
uses: actions-rs/toolchain@v1
@@ -103,7 +103,7 @@ jobs:
run: make frontend
- name: Run the frontend tests
run: make test.frontend
run: make frontend-test
- name: Run migrations
run: make migrate

View File

@@ -54,7 +54,7 @@ jobs:
- 10025:1025
maria:
image: mariadb:10
image: mariadb
env:
MARIADB_USER: "maria"
MARIADB_PASSWORD: "password"
@@ -95,7 +95,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: "18.0.0"
node-version: "16.x"
- name: Install ${{ matrix.version }}
uses: actions-rs/toolchain@v1

Cargo.lock (generated), 1210 changed lines

File diff suppressed because it is too large.

View File

@@ -59,8 +59,8 @@ log = "0.4"
lazy_static = "1.4"
libmcaptcha = { version = "0.2.3", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"], tag ="0.2.3" }
#libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
#libmcaptcha = { version = "0.2.2", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"], tag ="0.2.2" }
libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
#libmcaptcha = { path = "../libmcaptcha", features = ["full"]}
rand = "0.8"
@@ -79,7 +79,6 @@ lettre = { version = "0.10.0-rc.3", features = [
]}
openssl = { version = "0.10.48", features = ["vendored"] }
uuid = { version = "1.4.0", features = ["v4", "serde"] }
[dependencies.db-core]

View File

@@ -1,4 +1,4 @@
FROM node:18.0.0 as frontend
FROM node:16.0.0 as frontend
RUN set -ex; \
apt-get update; \
DEBIAN_FRONTEND=noninteractive \

Makefile, 135 changed lines
View File

@@ -2,39 +2,6 @@ BUNDLE = static/cache/bundle
OPENAPI = docs/openapi
CLEAN_UP = $(BUNDLE) src/cache_buster_data.json assets
define deploy_dependencies ## deploy dependencies
@-docker create --name ${db} \
-e POSTGRES_PASSWORD=password \
-p 5432:5432 \
postgres
@-docker create \
-p 3306:3306 \
--name ${mdb} \
--env MARIADB_USER=maria \
--env MARIADB_PASSWORD=password \
--env MARIADB_ROOT_PASSWORD=password \
--env MARIADB_DATABASE=maria \
mariadb:latest
@-docker create \
-p 6379:6379 \
--name mcaptcha-cache \
mcaptcha/cache:latest
docker start ${db}
docker start ${mdb}
docker start mcaptcha-cache
endef
define run_migrations ## run database migrations
cd db/db-migrations/ && cargo run
endef
define run_dev_migrations ## run database migrations
cd db/db-sqlx-maria/ && \
DATABASE_URL=${MARIA_DATABASE_URL} sqlx migrate run
cd db/db-sqlx-postgres/ && \
DATABASE_URL=${POSTGRES_DATABASE_URL} sqlx migrate run
endef
define frontend_env ## install frontend deps
yarn install
cd docs/openapi && yarn install
@@ -44,30 +11,6 @@ define cache_bust ## run cache_busting program
cd utils/cache-bust && cargo run
endef
define test_frontend ## run frontend tests
cd $(OPENAPI)&& yarn test
yarn test
endef
define test_db_sqlx_postgres
cd db/db-sqlx-postgres &&\
DATABASE_URL=${POSTGRES_DATABASE_URL}\
cargo test --no-fail-fast
endef
define test_db_sqlx_maria
cd db/db-sqlx-maria &&\
DATABASE_URL=${MARIA_DATABASE_URL}\
cargo test --no-fail-fast
endef
define test_core
cargo test --no-fail-fast
endef
default: frontend ## Build app in debug mode
$(call cache_bust)
cargo build
@@ -92,6 +35,10 @@ clean: ## Delete build artifacts
@yarn cache clean
@-rm $(CLEAN_UP)
coverage: migrate ## Generate code coverage report in HTML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Html
doc: ## Generate documentation
#yarn doc
cargo doc --no-deps --workspace --all-features
@@ -107,19 +54,6 @@ env: ## Setup development environtment
cargo fetch
$(call frontend_env)
env.db: ## Deploy dependencies
$(call deploy_dependencies)
sleep 5
$(call run_migrations)
env.db.recreate: ## Deploy dependencies from scratch
@-docker rm -f ${db}
@-docker rm -f ${mdb}
@-docker rm -f mcaptcha-cache
$(call deploy_dependencies)
sleep 5
$(call run_migrations)
frontend-env: ## Install frontend deps
$(call frontend_env)
@@ -142,6 +76,10 @@ frontend: ## Build frontend
@./scripts/librejs.sh
@./scripts/cachebust.sh
frontend-test: ## Run frontend tests
cd $(OPENAPI)&& yarn test
yarn test
lint: ## Lint codebase
cargo fmt -v --all -- --emit files
cargo clippy --workspace --tests --all-features
@@ -149,10 +87,7 @@ lint: ## Lint codebase
cd $(OPENAPI)&& yarn test
migrate: ## Run database migrations
$(call run_migrations)
migrate.dev: ## Run database migrations during development
$(call run_dev_migrations)
cd db/db-migrations/ && cargo run
release: frontend ## Build app with release optimizations
$(call cache_bust)
@@ -163,49 +98,37 @@ run: frontend ## Run app in debug mode
cargo run
db.sqlx.offline: ## prepare sqlx offline data
sqlx-offline-data: ## prepare sqlx offline data
cd db/db-sqlx-postgres && cargo sqlx prepare \
--database-url=${POSTGRES_DATABASE_URL} -- \
--all-features
cd db/db-sqlx-maria && cargo sqlx prepare \
--database-url=${MARIA_DATABASE_URL} -- \
--all-features
# cd db/db-sqlx-sqlite/ \
# && DATABASE_URL=${SQLITE_DATABASE_URL} cargo sqlx prepare
test: frontend ## Run all available tests
$(call test_frontend)
test-db: ## run tests on database
cd db/db-sqlx-postgres &&\
DATABASE_URL=${POSTGRES_DATABASE_URL}\
cargo test --no-fail-fast
test: frontend-test frontend ## Run all available tests
$(call cache_bust)
$(call test_db_sqlx_postgres)
$(call test_db_sqlx_maria)
$(call test_core)
cd db/db-sqlx-postgres &&\
DATABASE_URL=${POSTGRES_DATABASE_URL}\
cargo test --no-fail-fast
cd db/db-sqlx-maria &&\
DATABASE_URL=${MARIA_DATABASE_URL}\
cargo test --no-fail-fast
cargo test --no-fail-fast
# ./scripts/tests.sh
test.cov.html: migrate ## Generate code coverage report in HTML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Html
test.cov.xml: migrate ## Generate code coverage report in XML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Xml
test.core: ## Run all core tests
$(call test_core)
test.db: ## Run all database driver tests
$(call test_db_sqlx_postgres)
$(call test_db_sqlx_maria)
test.db.pg: ## Run Postgres database driver tests
$(call test_db_sqlx_postgres)
test.db.maria: ## Run Maria database driver tests
$(call test_db_sqlx_maria)
test.frontend: ## Run frontend tests
$(call test_frontend)
test.integration: ## run integration tests with nightwatch.js
./scripts/integration.sh
xml-test-coverage: migrate ## Generate code coverage report in XML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Xml
help: ## Prints help for targets with comments
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-].+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

View File

@@ -114,7 +114,7 @@ After the containers are up, visit [http://localhost:7000](http://localhost:7000
It takes a while to build the image so please be patient :)
See [DEPLOYMENT.md](./docs/DEPLOYMENT.md) for detailed alternate deployment
See [DEPLOYMENT.md](./docs/DEPLOYMENT.md) detailed alternate deployment
methods.
## Development:

View File

@@ -21,7 +21,7 @@ use sqlx::types::time::OffsetDateTime;
fn main() {
// note: add error checking yourself.
let output = Command::new("git")
.args(["rev-parse", "HEAD"])
.args(&["rev-parse", "HEAD"])
.output()
.unwrap();
let git_hash = String::from_utf8(output.stdout).unwrap();

View File

@@ -13,8 +13,9 @@ async-trait = "0.1.51"
thiserror = "1.0.30"
serde = { version = "1", features = ["derive"]}
url = { version = "2.2.2", features = ["serde"] }
libmcaptcha = { version = "0.2.3", git = "https://github.com/mCaptcha/libmcaptcha", features = ["minimal"], default-features = false, tag = "0.2.3"}
#libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
#libmcaptcha = { version = "0.2.2", git = "https://github.com/mCaptcha/libmcaptcha", features = ["minimal"], default-features = false, tag = "0.2.2"}
libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
uuid = { version = "1.3.3", features = ["v4", "serde"] }
[features]
default = []

View File

@@ -31,7 +31,10 @@
//! - [errors](crate::auth): error data structures used in this crate
//! - [ops](crate::ops): meta operations like connection pool creation, migrations and getting
//! connection from pool
use std::str::FromStr;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
pub use libmcaptcha::defense::Level;
@@ -97,6 +100,73 @@ pub struct NameHash {
pub hash: String,
}
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
/// Email challenge reason
pub enum ChallengeReason {
/// challenge created to verify a newly registered user
EmailVerification,
/// Challenge created to verify a password reset request
PasswordReset,
}
impl ChallengeReason {
pub fn to_str(&self) -> &'static str {
match self {
Self::EmailVerification => "email_verification",
Self::PasswordReset => "password_resset",
}
}
}
impl ToString for ChallengeReason {
fn to_string(&self) -> String {
self.to_str().into()
}
}
impl FromStr for ChallengeReason {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
for reason in [Self::PasswordReset, Self::EmailVerification].iter() {
if s == reason.to_str() {
return Ok(reason.clone());
}
}
Err(())
}
}
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
/// Minimal user representation for use in challenge verification
pub struct ChallengeUser {
/// username of the user
pub username: String,
/// email ID of the user
pub email: String,
}
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
/// Email challenge
pub struct Challenge {
/// challenge unique identifier
pub challenge: Uuid,
/// reason why the challenge was created
pub reason: ChallengeReason,
}
impl Challenge {
/// create new Challenge instance for a given reason. Challenge text is auto-generated
pub fn new(reason: ChallengeReason) -> Self {
let challenge = Uuid::new_v4();
Self { challenge, reason }
}
/// Generate new ID (useful when ID clashes)
pub fn new_id(&mut self) {
self.challenge = Uuid::new_v4();
}
}
#[async_trait]
/// mCaptcha's database requirements. To implement support for $Database, kindly implement this
/// trait.
@@ -251,80 +321,18 @@ pub trait MCDatabase: std::marker::Send + std::marker::Sync + CloneSPDatabase {
/// fetch PoWConfig confirms
async fn fetch_confirm(&self, user: &str, key: &str) -> DBResult<Vec<i64>>;
/// record PoW timing
async fn analysis_save(
/// Record challenge in database
async fn new_challenge(&self, user: &str, challenge: &mut Challenge)
-> DBResult<()>;
/// Record challenge in database
async fn fetch_challenge_user(
&self,
captcha_id: &str,
d: &CreatePerformanceAnalytics,
) -> DBResult<()>;
challenge: &Challenge,
) -> DBResult<ChallengeUser>;
/// fetch PoW analytics
async fn analytics_fetch(
&self,
captcha_id: &str,
limit: usize,
offset: usize,
) -> DBResult<Vec<PerformanceAnalytics>>;
/// Create psuedo ID against campaign ID to publish analytics
async fn analytics_create_psuedo_id_if_not_exists(
&self,
captcha_id: &str,
) -> DBResult<()>;
/// Get psuedo ID from campaign ID
async fn analytics_get_psuedo_id_from_capmaign_id(
&self,
captcha_id: &str,
) -> DBResult<String>;
/// Get campaign ID from psuedo ID
async fn analytics_get_capmaign_id_from_psuedo_id(
&self,
psuedo_id: &str,
) -> DBResult<String>;
/// Delete all records for campaign
async fn analytics_delete_all_records_for_campaign(
&self,
campaign_id: &str,
) -> DBResult<()>;
/// Get publishing status of pow analytics for captcha ID/ campaign ID
async fn analytics_captcha_is_published(&self, campaign_id: &str) -> DBResult<bool> {
match self
.analytics_get_psuedo_id_from_capmaign_id(campaign_id)
.await
{
Ok(_) => Ok(true),
Err(errors::DBError::CaptchaNotFound) => Ok(false),
Err(e) => Err(e),
}
}
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
/// Log Proof-of-Work CAPTCHA performance analytics
pub struct CreatePerformanceAnalytics {
/// time taken to generate proof
pub time: u32,
/// difficulty factor for which the proof was generated
pub difficulty_factor: u32,
/// worker/client type: wasm, javascript, python, etc.
pub worker_type: String,
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
/// Proof-of-Work CAPTCHA performance analytics
pub struct PerformanceAnalytics {
/// log ID
pub id: usize,
/// time taken to generate proof
pub time: u32,
/// difficulty factor for which the proof was generated
pub difficulty_factor: u32,
/// worker/client type: wasm, javascript, python, etc.
pub worker_type: String,
/// Delete a challenge from database
async fn delete_challenge(&self, challenge: &Challenge) -> DBResult<()>;
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
@@ -407,6 +415,7 @@ pub struct Secret {
/// user's secret
pub secret: String,
}
/// Trait to clone MCDatabase
pub trait CloneSPDatabase {
/// clone DB
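A minimal usage sketch of the challenge interface added above (not part of the diff): start_password_reset and finish_password_reset are hypothetical application helpers, the mailer call is only indicated in a comment, and the import paths are assumed to match db-core's module layout. Only the Challenge, ChallengeReason and ChallengeUser types and the three new MCDatabase methods shown in this file are relied on.

use db_core::errors::DBResult;
use db_core::{Challenge, ChallengeReason, ChallengeUser, MCDatabase};

/// Hypothetical helper: create and persist a password-reset challenge for `username`.
async fn start_password_reset<D: MCDatabase>(db: &D, username: &str) -> DBResult<ChallengeUser> {
    // Challenge::new generates a fresh UUID v4 for the given reason.
    let mut challenge = Challenge::new(ChallengeReason::PasswordReset);
    // Persist the challenge; drivers regenerate the ID if it collides with an existing row.
    db.new_challenge(username, &mut challenge).await?;
    // Resolve the owning account, e.g. to email the reset link.
    let user = db.fetch_challenge_user(&challenge).await?;
    // send_reset_email(&user.email, &challenge).await; // hypothetical mailer call
    Ok(user)
}

/// Hypothetical helper: consume a previously issued challenge.
async fn finish_password_reset<D: MCDatabase>(db: &D, challenge: &Challenge) -> DBResult<()> {
    // An unknown or already-deleted challenge surfaces as an Err from the driver.
    let user = db.fetch_challenge_user(challenge).await?;
    let _ = user.username; // ...update the account's password here...
    // Challenges are single use: delete the record once it has been consumed.
    db.delete_challenge(challenge).await
}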

View File

@@ -260,60 +260,6 @@ pub async fn database_works<'a, T: MCDatabase>(
db.record_solve(c.key).await.unwrap();
db.record_confirm(c.key).await.unwrap();
// analytics start
db.analytics_create_psuedo_id_if_not_exists(c.key)
.await
.unwrap();
let psuedo_id = db
.analytics_get_psuedo_id_from_capmaign_id(c.key)
.await
.unwrap();
db.analytics_create_psuedo_id_if_not_exists(c.key)
.await
.unwrap();
assert_eq!(
psuedo_id,
db.analytics_get_psuedo_id_from_capmaign_id(c.key)
.await
.unwrap()
);
assert_eq!(
c.key,
db.analytics_get_capmaign_id_from_psuedo_id(&psuedo_id)
.await
.unwrap()
);
let analytics = CreatePerformanceAnalytics {
time: 0,
difficulty_factor: 0,
worker_type: "wasm".into(),
};
db.analysis_save(c.key, &analytics).await.unwrap();
let limit = 50;
let mut offset = 0;
let a = db.analytics_fetch(c.key, limit, offset).await.unwrap();
assert_eq!(a[0].time, analytics.time);
assert_eq!(a[0].difficulty_factor, analytics.difficulty_factor);
assert_eq!(a[0].worker_type, analytics.worker_type);
offset += 1;
assert!(db
.analytics_fetch(c.key, limit, offset)
.await
.unwrap()
.is_empty());
db.analytics_delete_all_records_for_campaign(c.key)
.await
.unwrap();
assert_eq!(db.analytics_fetch(c.key, 1000, 0).await.unwrap().len(), 0);
assert!(!db.analytics_captcha_is_published(c.key).await.unwrap());
db.analytics_delete_all_records_for_campaign(c.key)
.await
.unwrap();
// analytics end
assert_eq!(db.fetch_solve(p.username, c.key).await.unwrap().len(), 1);
assert_eq!(
db.fetch_config_fetched(p.username, c.key)
@@ -349,4 +295,13 @@ pub async fn database_works<'a, T: MCDatabase>(
// delete captcha; updated key = p.username so invoke delete with it
db.delete_captcha(p.username, p.username).await.unwrap();
assert!(!db.captcha_exists(Some(p.username), c.key).await.unwrap());
let mut challenge = Challenge::new(ChallengeReason::PasswordReset);
db.new_challenge(p.username, &mut challenge).await.unwrap();
db.new_challenge(p.username, &mut challenge).await.unwrap();
let c = db.fetch_challenge_user(&challenge).await.unwrap();
assert_eq!(c.username, p.username);
assert_eq!(&c.email, p.email.as_ref().unwrap());
db.delete_challenge(&challenge).await.unwrap();
assert!(db.fetch_challenge_user(&challenge).await.is_err())
}

View File

@@ -13,7 +13,6 @@ async-trait = "0.1.51"
db-core = {path = "../db-core"}
futures = "0.3.15"
sqlx = { version = "0.5.13", features = [ "runtime-actix-rustls", "mysql", "time", "offline" ] }
uuid = { version = "1.4.0", features = ["v4", "serde"] }
[dev-dependencies]
actix-rt = "2"

View File

@@ -0,0 +1,27 @@
CREATE TABLE IF NOT EXISTS mcaptcha_challenge_reason (
id INT auto_increment,
PRIMARY KEY(id),
name VARCHAR(40) NOT NULL UNIQUE
);
CREATE TABLE IF NOT EXISTS mcaptcha_challenge (
id INT auto_increment,
PRIMARY KEY(id),
reason INT NOT NULL,
challenge_id varchar(40) NOT NULL UNIQUE,
received timestamp NOT NULL DEFAULT now(),
user_id INT NOT NULL,
CONSTRAINT `fk_mcaptcha_challenge_user`
FOREIGN KEY (user_id)
REFERENCES mcaptcha_users (ID)
ON DELETE CASCADE
ON UPDATE CASCADE,
CONSTRAINT `fk_mcaptcha_mcaptcha_challenge_reason`
FOREIGN KEY (reason)
REFERENCES mcaptcha_challenge_reason (id)
ON DELETE CASCADE
ON UPDATE CASCADE
);

View File

@@ -1,13 +0,0 @@
CREATE TABLE IF NOT EXISTS mcaptcha_pow_analytics (
ID INT auto_increment,
PRIMARY KEY(ID),
config_id INTEGER NOT NULL,
time INTEGER NOT NULL,
difficulty_factor INTEGER NOT NULL,
worker_type VARCHAR(100) NOT NULL,
CONSTRAINT `fk_mcaptcha_config_id_pow_analytics`
FOREIGN KEY (config_id)
REFERENCES mcaptcha_config (config_id)
ON DELETE CASCADE
ON UPDATE CASCADE
);

View File

@@ -1,13 +0,0 @@
CREATE TABLE IF NOT EXISTS mcaptcha_psuedo_campaign_id (
ID INT auto_increment,
PRIMARY KEY(ID),
psuedo_id varchar(100) NOT NULL UNIQUE,
config_id INT NOT NULL,
CONSTRAINT `fk_mcaptcha_psuedo_campaign_id_config_id`
FOREIGN KEY (config_id)
REFERENCES mcaptcha_config (config_id)
ON DELETE CASCADE
ON UPDATE CASCADE
);

View File

@@ -1,5 +1,53 @@
{
"db": "MySQL",
"04e79a67bc8c1b18eca95fc4d2602ed5dd41b6d864796f034540efec3da05fa8": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "INSERT IGNORE INTO\n mcaptcha_challenge_reason (name)\n VALUES (?)"
},
"12a7d765fb683c8134d032563f2d101e2fd70c261e71696e7a90387507e0ef43": {
"describe": {
"columns": [
{
"name": "name",
"ordinal": 0,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4101
},
"max_size": 400,
"type": "VarString"
}
},
{
"name": "email",
"ordinal": 1,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false,
true
],
"parameters": {
"Right": 2
}
},
"query": "SELECT name, email\n FROM mcaptcha_users\n WHERE ID = (SELECT user_id \n FROM mcaptcha_challenge\n WHERE\n challenge_id = ?\n AND reason = (\n SELECT id FROM mcaptcha_challenge_reason WHERE name = ?\n )\n );"
},
"1367dceb151a766a901b5dd771d0b75d0bc61d2fef17a94a90c8ffa0065e2c44": {
"describe": {
"columns": [
@@ -25,31 +73,6 @@
},
"query": "SELECT time FROM mcaptcha_pow_confirmed_stats \n WHERE \n config_id = (\n SELECT config_id FROM mcaptcha_config \n WHERE \n captcha_key = ?\n AND\n user_id = (\n SELECT \n ID FROM mcaptcha_users WHERE name = ?))\n ORDER BY time DESC"
},
"14dc89b2988b221fd24e4f319b1d48f5e6c65c760c30d11c9c29087f09cee23a": {
"describe": {
"columns": [
{
"name": "captcha_key",
"ordinal": 0,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4101
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false
],
"parameters": {
"Right": 1
}
},
"query": "SELECT\n captcha_key\n FROM\n mcaptcha_config\n WHERE\n config_id = (\n SELECT\n config_id\n FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n psuedo_id = ?\n );"
},
"22e697114c3ed5b0156cdceab11a398f1ef3a804f482e1cd948bc615ef95fc92": {
"describe": {
"columns": [],
@@ -179,31 +202,6 @@
},
"query": "INSERT INTO mcaptcha_pow_fetched_stats \n (config_id, time) VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?)"
},
"5ad1ef722a961183228d851813b9f50284520bf8cc8118c765b72c108daaf6fb": {
"describe": {
"columns": [
{
"name": "psuedo_id",
"ordinal": 0,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4101
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false
],
"parameters": {
"Right": 1
}
},
"query": "SELECT psuedo_id FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n config_id = (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?));\n "
},
"5d5a106981345e9f62bc2239c00cdc683d3aaaa820d63da300dc51e3f6f363d3": {
"describe": {
"columns": [],
@@ -214,16 +212,6 @@
},
"query": "INSERT INTO mcaptcha_users \n (name , password, secret) VALUES (?, ?, ?)"
},
"6094468b7fa20043b0da90e366b7f1fa29a8c748e163b6712725440b25ae9361": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "\n DELETE FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n ) "
},
"66ec7df10484f8e0206f3c97afc9136021589556c38dbbed341d6574487f79f2": {
"describe": {
"columns": [
@@ -307,6 +295,16 @@
},
"query": "SELECT difficulty_factor, visitor_threshold FROM mcaptcha_levels WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config where captcha_key= (?)\n ) ORDER BY difficulty_factor ASC;"
},
"740ed2dab8c07c718c1b0e8e4262251bbf2501cdebfc4872fb903f70ec3d0dc8": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 4
}
},
"query": "INSERT INTO mcaptcha_challenge (challenge_id, received, reason, user_id)\n VALUES (?, ?,\n (SELECT id FROM mcaptcha_challenge_reason WHERE name = ?),\n (SELECT id FROM mcaptcha_users WHERE name = ?)\n );\n "
},
"74d68a86f852d3d85957e94ed04e8acd8e6144744f7b13e383ebcb2bcf3360ae": {
"describe": {
"columns": [],
@@ -466,80 +464,6 @@
},
"query": "UPDATE mcaptcha_users set email = ?\n WHERE name = ?"
},
"9e45969a0f79eab8caba41b0d91e5e3b85a1a68a49136f89fc90793c38f00041": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 2
}
},
"query": "\n INSERT INTO\n mcaptcha_psuedo_campaign_id (config_id, psuedo_id)\n VALUES (\n (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)),\n ?\n );"
},
"9f10afb0f242f11c58389803c5e85e244cc59102b8929a21e3fcaa852d57a52c": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": {
"char_set": 63,
"flags": {
"bits": 515
},
"max_size": 11,
"type": "Long"
}
},
{
"name": "time",
"ordinal": 1,
"type_info": {
"char_set": 63,
"flags": {
"bits": 4097
},
"max_size": 11,
"type": "Long"
}
},
{
"name": "difficulty_factor",
"ordinal": 2,
"type_info": {
"char_set": 63,
"flags": {
"bits": 4097
},
"max_size": 11,
"type": "Long"
}
},
{
"name": "worker_type",
"ordinal": 3,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4097
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false,
false,
false,
false
],
"parameters": {
"Right": 3
}
},
"query": "SELECT\n id, time, difficulty_factor, worker_type\n FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n ) \n ORDER BY ID\n LIMIT ? OFFSET ?"
},
"a89c066db044cddfdebee6a0fd0d80a5a26dcb7ecc00a9899f5634b72ea0a952": {
"describe": {
"columns": [
@@ -893,16 +817,6 @@
},
"query": "INSERT INTO mcaptcha_pow_solved_stats \n (config_id, time) VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?)"
},
"e4d9bf156a368dcee1433dd5ced9f1991aa15f84e0ade916433aada40f68f0aa": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "\n DELETE FROM\n mcaptcha_psuedo_campaign_id\n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n );"
},
"e6569a6064d0e07abea4c0bd4686cdfdaac64f0109ac40efaed06a744a2eaf5e": {
"describe": {
"columns": [
@@ -1017,15 +931,15 @@
},
"query": "SELECT name, password FROM mcaptcha_users WHERE email = ?"
},
"f987c4568ab28271d87af47f473b18cf41130a483333e81d5f50199758cbb98b": {
"f47c05c0a7da41a2176f08a44c6c945dabb84558a4d09369b6108bfce8b9d2bf": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 4
"Right": 2
}
},
"query": "INSERT INTO mcaptcha_pow_analytics \n (config_id, time, difficulty_factor, worker_type)\n VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?, ?, ?)"
"query": "DELETE\n FROM mcaptcha_challenge\n WHERE\n challenge_id = ?\n AND reason = (SELECT id FROM mcaptcha_challenge_reason WHERE name = ?);"
},
"fc717ff0827ccfaa1cc61a71cc7f71c348ebb03d35895c54b011c03121ad2385": {
"describe": {

View File

@@ -22,7 +22,6 @@ use sqlx::mysql::MySqlPoolOptions;
use sqlx::types::time::OffsetDateTime;
use sqlx::ConnectOptions;
use sqlx::MySqlPool;
use uuid::Uuid;
pub mod errors;
#[cfg(test)]
@@ -96,6 +95,22 @@ impl Migrate for Database {
.run(&self.pool)
.await
.map_err(|e| DBError::DBError(Box::new(e)))?;
for reason in [
ChallengeReason::EmailVerification,
ChallengeReason::PasswordReset,
] {
sqlx::query!(
"INSERT IGNORE INTO
mcaptcha_challenge_reason (name)
VALUES (?)",
reason.to_str()
)
.execute(&self.pool)
.await
.map_err(|e| DBError::DBError(Box::new(e)))?;
}
Ok(())
}
}
@@ -897,188 +912,90 @@ impl MCDatabase for Database {
Ok(Date::dates_to_unix(records))
}
/// record PoW timing
async fn analysis_save(
/// Record challenge in database
async fn new_challenge(
&self,
captcha_id: &str,
d: &CreatePerformanceAnalytics,
user: &str,
challenge: &mut Challenge,
) -> DBResult<()> {
let _ = sqlx::query!(
"INSERT INTO mcaptcha_pow_analytics
(config_id, time, difficulty_factor, worker_type)
VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?, ?, ?)",
captcha_id,
d.time as i32,
d.difficulty_factor as i32,
&d.worker_type,
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(())
}
/// fetch PoW analytics
async fn analytics_fetch(
&self,
captcha_id: &str,
limit: usize,
offset: usize,
) -> DBResult<Vec<PerformanceAnalytics>> {
struct P {
id: i32,
time: i32,
difficulty_factor: i32,
worker_type: String,
}
impl From<P> for PerformanceAnalytics {
fn from(v: P) -> Self {
Self {
id: v.id as usize,
time: v.time as u32,
difficulty_factor: v.difficulty_factor as u32,
worker_type: v.worker_type,
}
}
}
let mut c = sqlx::query_as!(
P,
"SELECT
id, time, difficulty_factor, worker_type
FROM
mcaptcha_pow_analytics
WHERE
config_id = (
SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?
)
ORDER BY ID
LIMIT ? OFFSET ?",
&captcha_id,
limit as i64,
offset as i64,
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
let mut res = Vec::with_capacity(c.len());
for i in c.drain(0..) {
res.push(i.into())
}
Ok(res)
}
/// Create psuedo ID against campaign ID to publish analytics
async fn analytics_create_psuedo_id_if_not_exists(
&self,
captcha_id: &str,
) -> DBResult<()> {
let id = Uuid::new_v4();
sqlx::query!(
"
INSERT INTO
mcaptcha_psuedo_campaign_id (config_id, psuedo_id)
VALUES (
(SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)),
?
);",
captcha_id,
&id.to_string(),
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(())
}
/// Get psuedo ID from campaign ID
async fn analytics_get_psuedo_id_from_capmaign_id(
&self,
captcha_id: &str,
) -> DBResult<String> {
struct ID {
psuedo_id: String,
}
let res = sqlx::query_as!(
ID,
"SELECT psuedo_id FROM
mcaptcha_psuedo_campaign_id
WHERE
config_id = (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?));
let now = now_unix_time_stamp();
loop {
let res = sqlx::query!(
"INSERT INTO mcaptcha_challenge (challenge_id, received, reason, user_id)
VALUES (?, ?,
(SELECT id FROM mcaptcha_challenge_reason WHERE name = ?),
(SELECT id FROM mcaptcha_users WHERE name = ?)
);
",
captcha_id
).fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
&challenge.challenge.to_string(),
now,
challenge.reason.to_str(),
user
)
.execute(&self.pool)
.await;
if let Err(Error::Database(err)) = res {
use std::borrow::Cow;
Ok(res.psuedo_id)
if err.code() == Some(Cow::from("23505")) {
let msg = err.message();
if msg.contains("for key 'challenge_id'") {
challenge.new_id();
continue;
}
}
}
break;
}
Ok(())
}
/// Get campaign ID from psuedo ID
async fn analytics_get_capmaign_id_from_psuedo_id(
/// Record challenge in database
async fn fetch_challenge_user(
&self,
psuedo_id: &str,
) -> DBResult<String> {
struct ID {
captcha_key: String,
challenge: &Challenge,
) -> DBResult<ChallengeUser> {
struct C {
name: String,
email: Option<String>,
}
let res = sqlx::query_as!(
ID,
"SELECT
captcha_key
FROM
mcaptcha_config
C,
"SELECT name, email
FROM mcaptcha_users
WHERE ID = (SELECT user_id
FROM mcaptcha_challenge
WHERE
config_id = (
SELECT
config_id
FROM
mcaptcha_psuedo_campaign_id
WHERE
psuedo_id = ?
challenge_id = ?
AND reason = (
SELECT id FROM mcaptcha_challenge_reason WHERE name = ?
)
);",
psuedo_id
&challenge.challenge.to_string(),
challenge.reason.to_str(),
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.captcha_key)
.map_err(map_register_err)?;
Ok(ChallengeUser {
username: res.name,
email: res.email.unwrap(),
})
}
async fn analytics_delete_all_records_for_campaign(
&self,
campaign_id: &str,
) -> DBResult<()> {
/// Delete a challenge from database
async fn delete_challenge(&self, challenge: &Challenge) -> DBResult<()> {
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_psuedo_campaign_id
WHERE config_id = (
SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?
);",
campaign_id
)
.execute(&self.pool)
.await;
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_pow_analytics
"DELETE
FROM mcaptcha_challenge
WHERE
config_id = (
SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?
) ",
campaign_id
challenge_id = ?
AND reason = (SELECT id FROM mcaptcha_challenge_reason WHERE name = ?);",
&challenge.challenge.to_string(),
challenge.reason.to_str(),
)
.execute(&self.pool)
.await;
Ok(())
}
}
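The collision handling inside new_challenge above is easy to lose among the removed analytics code, so here is a condensed sketch of just that pattern, with the assumptions labelled: try_insert stands in for the driver's sqlx::query! INSERT, and InsertError::DuplicateChallengeId is an illustrative variant representing the unique-constraint check the driver performs on the database error; neither is a real db-core or sqlx item.

use db_core::Challenge;

/// Illustrative error type for the sketch; real drivers inspect the sqlx error directly.
enum InsertError {
    DuplicateChallengeId,
    Other(String),
}

/// Keep inserting until the challenge UUID no longer collides with an existing row.
async fn insert_with_retry<F, Fut>(
    challenge: &mut Challenge,
    mut try_insert: F,
) -> Result<(), InsertError>
where
    F: FnMut(String) -> Fut,
    Fut: std::future::Future<Output = Result<(), InsertError>>,
{
    loop {
        match try_insert(challenge.challenge.to_string()).await {
            // The UNIQUE constraint on challenge_id fired: mint a new UUID and retry.
            Err(InsertError::DuplicateChallengeId) => challenge.new_id(),
            // Success or any other error ends the loop.
            other => return other,
        }
    }
}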

View File

@@ -13,7 +13,6 @@ async-trait = "0.1.51"
db-core = {path = "../db-core"}
futures = "0.3.15"
sqlx = { version = "0.5.13", features = [ "runtime-actix-rustls", "postgres", "time", "offline" ] }
uuid = { version = "1.4.0", features = ["v4", "serde"] }
[dev-dependencies]
actix-rt = "2"

View File

@@ -0,0 +1,12 @@
CREATE TABLE IF NOT EXISTS mcaptcha_challenge_reason (
id SERIAL PRIMARY KEY NOT NULL,
name VARCHAR(40) NOT NULL UNIQUE
);
CREATE TABLE IF NOT EXISTS mcaptcha_challenge (
id SERIAL PRIMARY KEY NOT NULL,
reason INTEGER NOT NULL references mcaptcha_challenge_reason(ID) ON DELETE CASCADE,
user_id INTEGER NOT NULL references mcaptcha_users(ID) ON DELETE CASCADE,
challenge_id varchar(40) NOT NULL UNIQUE,
received timestamptz NOT NULL DEFAULT now()
);

View File

@@ -1,7 +0,0 @@
CREATE TABLE IF NOT EXISTS mcaptcha_pow_analytics (
config_id INTEGER references mcaptcha_config(config_id) ON DELETE CASCADE,
time INTEGER NOT NULL,
difficulty_factor INTEGER NOT NULL,
worker_type VARCHAR(100) NOT NULL,
ID SERIAL PRIMARY KEY NOT NULL
);

View File

@@ -1,5 +0,0 @@
CREATE TABLE IF NOT EXISTS mcaptcha_psuedo_campaign_id (
id SERIAL PRIMARY KEY NOT NULL,
config_id INTEGER NOT NULL references mcaptcha_config(config_id) ON DELETE CASCADE,
psuedo_id varchar(100) NOT NULL UNIQUE
);

View File

@@ -1,45 +1,5 @@
{
"db": "PostgreSQL",
"017576128f1c63aee062799a33f872457fe19f5d6429d0af312dc00c244b31cb": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int4"
},
{
"name": "time",
"ordinal": 1,
"type_info": "Int4"
},
{
"name": "difficulty_factor",
"ordinal": 2,
"type_info": "Int4"
},
{
"name": "worker_type",
"ordinal": 3,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false,
false
],
"parameters": {
"Left": [
"Text",
"Int8",
"Int8"
]
}
},
"query": "SELECT id, time, difficulty_factor, worker_type FROM mcaptcha_pow_analytics\n WHERE \n config_id = (\n SELECT \n config_id FROM mcaptcha_config \n WHERE \n key = $1\n )\n ORDER BY ID\n OFFSET $2 LIMIT $3\n "
},
"02deb524bb12632af9b7883975f75fdc30d6775d836aff647add1dffd1a4bc00": {
"describe": {
"columns": [
@@ -121,6 +81,33 @@
},
"query": "DELETE FROM mcaptcha_sitekey_user_provided_avg_traffic\n WHERE config_id = (\n SELECT config_id \n FROM \n mcaptcha_config \n WHERE\n key = ($1) \n AND \n user_id = (SELECT ID FROM mcaptcha_users WHERE name = $2)\n );"
},
"0fe29ca10e9a83f2064b1b98f570161d339891a74c637077b94d138a4360340e": {
"describe": {
"columns": [
{
"name": "email",
"ordinal": 0,
"type_info": "Varchar"
},
{
"name": "name",
"ordinal": 1,
"type_info": "Varchar"
}
],
"nullable": [
true,
false
],
"parameters": {
"Left": [
"Text",
"Text"
]
}
},
"query": "SELECT\n email, name\n FROM\n mcaptcha_users\n WHERE\n ID = (\n SELECT\n user_id\n FROM\n mcaptcha_challenge\n WHERE\n challenge_id = $1\n AND reason = (SELECT ID FROM mcaptcha_challenge_reason WHERE name = $2)\n );"
},
"16864df9cf9a69c299d9ab68bac559c48f4fc433541a10f7c1b60717df2b820e": {
"describe": {
"columns": [
@@ -159,6 +146,21 @@
},
"query": "SELECT key, name, config_id, duration FROM mcaptcha_config WHERE\n user_id = (SELECT ID FROM mcaptcha_users WHERE name = $1) "
},
"1e08fab612b17ab3cf3f76cd1543fb4d4006f7c20e09ecb58e1a1cfd5a7e70a2": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar",
"Timestamptz",
"Text",
"Text"
]
}
},
"query": "INSERT INTO mcaptcha_challenge (challenge_id, received, reason, user_id)\n VALUES ($1, $2, \n (SELECT ID FROM mcaptcha_challenge_reason WHERE name = $3),\n (SELECT ID FROM mcaptcha_users WHERE name = $4)\n );\n "
},
"1e9fe69b23e4bfa7bb369455753100307e334e8dbaf02ff37cda08992fe95910": {
"describe": {
"columns": [],
@@ -172,26 +174,6 @@
},
"query": "UPDATE mcaptcha_users set name = $1\n WHERE name = $2"
},
"21cdf28d8962389d22c8ddefdad82780f5316737e3d833623512aa12a54a026a": {
"describe": {
"columns": [
{
"name": "key",
"ordinal": 0,
"type_info": "Varchar"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT\n key\n FROM\n mcaptcha_config\n WHERE\n config_id = (\n SELECT\n config_id\n FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n psuedo_id = $1\n );"
},
"2b319a202bb983d5f28979d1e371f399125da1122fbda36a5a55b75b9c743451": {
"describe": {
"columns": [],
@@ -240,18 +222,6 @@
},
"query": "SELECT email FROM mcaptcha_users WHERE name = $1"
},
"30d8945806b4c68b6da800395f61c1e480839093bfcda9c693bf1972a65c7d79": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "\n DELETE FROM\n mcaptcha_psuedo_campaign_id\n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = ($1)\n );"
},
"3b1c8128fc48b16d8e8ea6957dd4fbc0eb19ae64748fd7824e9f5e1901dd1726": {
"describe": {
"columns": [],
@@ -478,26 +448,6 @@
},
"query": "INSERT INTO mcaptcha_users \n (name , password, secret) VALUES ($1, $2, $3)"
},
"839dfdfc3543b12128cb2b44bf356cd81f3da380963e5684ec3624a0ea4f9547": {
"describe": {
"columns": [
{
"name": "psuedo_id",
"ordinal": 0,
"type_info": "Varchar"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT psuedo_id FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n config_id = (SELECT config_id FROM mcaptcha_config WHERE key = ($1));\n "
},
"84484cb6892db29121816bc5bff5702b9e857e20aa14e79d080d78ae7593153b": {
"describe": {
"columns": [
@@ -519,6 +469,19 @@
},
"query": "SELECT time FROM mcaptcha_pow_solved_stats \n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config \n WHERE \n key = $1\n AND\n user_id = (\n SELECT \n ID FROM mcaptcha_users WHERE name = $2)) \n ORDER BY time DESC"
},
"8a624372ec26200acdbc1c6c330dad841581e9abad586fa7f5a117a7cd289bd9": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Text"
]
}
},
"query": "DELETE\n FROM mcaptcha_challenge\n WHERE\n challenge_id = $1\n AND reason = (SELECT ID FROM mcaptcha_challenge_reason WHERE name = $2);"
},
"9753721856a47438c5e72f28fd9d149db10c48e677b4613bf3f1e8487908aac8": {
"describe": {
"columns": [
@@ -545,6 +508,18 @@
},
"query": "SELECT difficulty_factor, visitor_threshold FROM mcaptcha_levels WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = ($1)\n ) ORDER BY difficulty_factor ASC;"
},
"a209d14eb2c2eba8a750d66f74f8edcdbb02cf7c6c5249b226db30f52541a79b": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Varchar"
]
}
},
"query": "INSERT INTO\n mcaptcha_challenge_reason (name)\n VALUES ($1) ON CONFLICT DO NOTHING\n "
},
"ad196ab3ef9dc32f6de2313577ccd6c26eae9ab19df5f71ce182651983efb99a": {
"describe": {
"columns": [
@@ -585,33 +560,6 @@
},
"query": "SELECT EXISTS (SELECT 1 from mcaptcha_users WHERE name = $1)"
},
"af47990880a92c63d1cf5192203899c72621479dc6bb47859fb4498264b78033": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Int4",
"Int4",
"Varchar"
]
}
},
"query": "INSERT INTO mcaptcha_pow_analytics \n (config_id, time, difficulty_factor, worker_type)\n VALUES ((SELECT config_id FROM mcaptcha_config WHERE key = $1), $2, $3, $4)"
},
"b67da576ff30a1bc8b1c0a79eff07f0622bd9ea035d3de15b91f5e1e8a5fda9b": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "\n DELETE FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = $1\n )\n "
},
"b97d810814fbeb2df19f47bcfa381bc6fb7ac6832d040b377cf4fca2ca896cfb": {
"describe": {
"columns": [],
@@ -664,19 +612,6 @@
},
"query": "SELECT name, password FROM mcaptcha_users WHERE email = ($1)"
},
"c1bb8e02d1f9dc28322309d055de3c40ed4e1a1b9453a7e5a93a70e5186d762d": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Varchar"
]
}
},
"query": "\n INSERT INTO\n mcaptcha_psuedo_campaign_id (config_id, psuedo_id)\n VALUES (\n (SELECT config_id FROM mcaptcha_config WHERE key = ($1)),\n $2\n );"
},
"c2e167e56242de7e0a835e25004b15ca8340545fa0ca7ac8f3293157d2d03d98": {
"describe": {
"columns": [

View File

@@ -22,7 +22,6 @@ use sqlx::postgres::PgPoolOptions;
use sqlx::types::time::OffsetDateTime;
use sqlx::ConnectOptions;
use sqlx::PgPool;
use uuid::Uuid;
pub mod errors;
#[cfg(test)]
@@ -96,6 +95,23 @@ impl Migrate for Database {
.run(&self.pool)
.await
.map_err(|e| DBError::DBError(Box::new(e)))?;
for reason in [
ChallengeReason::EmailVerification,
ChallengeReason::PasswordReset,
] {
sqlx::query!(
"INSERT INTO
mcaptcha_challenge_reason (name)
VALUES ($1) ON CONFLICT DO NOTHING
",
reason.to_str()
)
.execute(&self.pool)
.await
.map_err(|e| DBError::DBError(Box::new(e)))?;
}
Ok(())
}
}
@@ -903,191 +919,96 @@ impl MCDatabase for Database {
Ok(Date::dates_to_unix(records))
}
/// record PoW timing
async fn analysis_save(
/// Record challenge in database
async fn new_challenge(
&self,
captcha_id: &str,
d: &CreatePerformanceAnalytics,
user: &str,
challenge: &mut Challenge,
) -> DBResult<()> {
let _ = sqlx::query!(
"INSERT INTO mcaptcha_pow_analytics
(config_id, time, difficulty_factor, worker_type)
VALUES ((SELECT config_id FROM mcaptcha_config WHERE key = $1), $2, $3, $4)",
captcha_id,
d.time as i32,
d.difficulty_factor as i32,
&d.worker_type,
let now = now_unix_time_stamp();
loop {
let res = sqlx::query!(
"INSERT INTO mcaptcha_challenge (challenge_id, received, reason, user_id)
VALUES ($1, $2,
(SELECT ID FROM mcaptcha_challenge_reason WHERE name = $3),
(SELECT ID FROM mcaptcha_users WHERE name = $4)
);
",
&challenge.challenge.to_string(),
now,
challenge.reason.to_str(),
user
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
.await;
if let Err(Error::Database(err)) = res {
use std::borrow::Cow;
if err.code() == Some(Cow::from("23505")) {
let msg = err.message();
if msg.contains("mcaptcha_challenge_challenge_id_key") {
challenge.new_id();
continue;
}
}
}
break;
}
Ok(())
}
/// fetch PoW analytics
async fn analytics_fetch(
/// Record challenge in database
async fn fetch_challenge_user(
&self,
captcha_id: &str,
limit: usize,
offset: usize,
) -> DBResult<Vec<PerformanceAnalytics>> {
struct P {
id: i32,
time: i32,
difficulty_factor: i32,
worker_type: String,
}
impl From<P> for PerformanceAnalytics {
fn from(v: P) -> Self {
Self {
time: v.time as u32,
difficulty_factor: v.difficulty_factor as u32,
worker_type: v.worker_type,
id: v.id as usize,
}
}
}
let mut c = sqlx::query_as!(
P,
"SELECT id, time, difficulty_factor, worker_type FROM mcaptcha_pow_analytics
WHERE
config_id = (
SELECT
config_id FROM mcaptcha_config
WHERE
key = $1
)
ORDER BY ID
OFFSET $2 LIMIT $3
",
&captcha_id,
offset as i32,
limit as i32
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
let mut res = Vec::with_capacity(c.len());
for i in c.drain(0..) {
res.push(i.into())
}
Ok(res)
}
/// Create psuedo ID against campaign ID to publish analytics
async fn analytics_create_psuedo_id_if_not_exists(
&self,
captcha_id: &str,
) -> DBResult<()> {
let id = Uuid::new_v4();
sqlx::query!(
"
INSERT INTO
mcaptcha_psuedo_campaign_id (config_id, psuedo_id)
VALUES (
(SELECT config_id FROM mcaptcha_config WHERE key = ($1)),
$2
);",
captcha_id,
&id.to_string(),
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(())
}
/// Get psuedo ID from campaign ID
async fn analytics_get_psuedo_id_from_capmaign_id(
&self,
captcha_id: &str,
) -> DBResult<String> {
struct ID {
psuedo_id: String,
challenge: &Challenge,
) -> DBResult<ChallengeUser> {
struct U {
name: String,
email: Option<String>,
}
let res = sqlx::query_as!(
ID,
"SELECT psuedo_id FROM
mcaptcha_psuedo_campaign_id
WHERE
config_id = (SELECT config_id FROM mcaptcha_config WHERE key = ($1));
",
captcha_id
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.psuedo_id)
}
/// Get campaign ID from psuedo ID
async fn analytics_get_capmaign_id_from_psuedo_id(
&self,
psuedo_id: &str,
) -> DBResult<String> {
struct ID {
key: String,
}
let res = sqlx::query_as!(
ID,
U,
"SELECT
key
email, name
FROM
mcaptcha_config
mcaptcha_users
WHERE
config_id = (
ID = (
SELECT
config_id
user_id
FROM
mcaptcha_psuedo_campaign_id
mcaptcha_challenge
WHERE
psuedo_id = $1
challenge_id = $1
AND reason = (SELECT ID FROM mcaptcha_challenge_reason WHERE name = $2)
);",
psuedo_id
challenge.challenge.to_string(),
challenge.reason.to_str(),
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.key)
.map_err(map_register_err)?;
Ok(ChallengeUser {
username: res.name,
email: res.email.unwrap(),
})
}
async fn analytics_delete_all_records_for_campaign(
&self,
campaign_id: &str,
) -> DBResult<()> {
/// Delete a challenge from database
async fn delete_challenge(&self, challenge: &Challenge) -> DBResult<()> {
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_psuedo_campaign_id
WHERE config_id = (
SELECT config_id FROM mcaptcha_config WHERE key = ($1)
);",
campaign_id
)
.execute(&self.pool)
.await;
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_pow_analytics
"DELETE
FROM mcaptcha_challenge
WHERE
config_id = (
SELECT config_id FROM mcaptcha_config WHERE key = $1
)
",
campaign_id
challenge_id = $1
AND reason = (SELECT ID FROM mcaptcha_challenge_reason WHERE name = $2);",
&challenge.challenge.to_string(),
challenge.reason.to_str(),
)
.execute(&self.pool)
.await;
Ok(())
}
}

View File

@@ -136,9 +136,7 @@ export default {
testEnvironment: "jest-environment-jsdom",
// Options that will be passed to the testEnvironment
testEnvironmentOptions: {
"url": "http://localhost:7000/widget/?sitekey=imbatman"
},
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,

package-lock.json (generated), 19224 changed lines

File diff suppressed because it is too large.

View File

@@ -10,35 +10,34 @@
"test": "jest"
},
"devDependencies": {
"@types/jest": "^29.5.2",
"@types/jsdom": "^21.1.1",
"@types/node": "^20.3.3",
"@types/sinon": "^10.0.15",
"@typescript-eslint/eslint-plugin": "^5.60.1",
"@typescript-eslint/parser": "^5.60.1",
"@wasm-tool/wasm-pack-plugin": "^1.7.0",
"css-loader": "^6.8.1",
"css-minimizer-webpack-plugin": "^5.0.1",
"eslint": "^8.44.0",
"jest": "^29.5.0",
"jest-environment-jsdom": "^29.5.0",
"@types/jest": "^27.0.2",
"@types/jsdom": "^16.2.10",
"@types/node": "^16.10.4",
"@types/sinon": "^10.0.0",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"@wasm-tool/wasm-pack-plugin": "^1.4.0",
"css-loader": "^6.4.0",
"css-minimizer-webpack-plugin": "^3.1.1",
"sass": "^1.25.0",
"eslint": "^8.0.0",
"jest": "^27.2.5",
"jest-fetch-mock": "^3.0.3",
"jsdom": "^22.1.0",
"mini-css-extract-plugin": "^2.7.6",
"sass": "^1.63.6",
"sass-loader": "^13.3.2",
"sinon": "^15.2.0",
"ts-jest": "^29.1.1",
"ts-loader": "^9.4.4",
"ts-node": "^10.9.1",
"typescript": "^5.1.6",
"webpack": "^5.88.1",
"webpack-cli": "^5.1.4",
"webpack-dev-server": "^4.15.1"
"jsdom": "^18.0.0",
"mini-css-extract-plugin": "^2.4.2",
"sass-loader": "^12.2.0",
"sinon": "^11.1.2",
"ts-jest": "^27.0.5",
"ts-loader": "^9.2.6",
"ts-node": "^10.3.0",
"typescript": "^4.1.0",
"webpack": "^5.0.0",
"webpack-cli": "^4.6.0",
"webpack-dev-server": "^4.3.1"
},
"dependencies": {
"@mcaptcha/pow_sha256-polyfill": "^0.1.0-alpha-1",
"@mcaptcha/pow-wasm": "^0.1.0-alpha-1",
"@mcaptcha/vanilla-glue": "^0.1.0-alpha-3"
"@mcaptcha/pow_sha256-polyfill": "^0.1.0-alpha-1",
"@mcaptcha/vanilla-glue": "^0.1.0-alpha-1"
}
}

View File

@@ -28,8 +28,8 @@ git clone https://github.com/mCaptcha/integration .
if is_ci
then
yarn install
xvfb-run --auto-servernum npm run test.firefox
xvfb-run --auto-servernum npm run test.chrome
xvfb-run --auto-servernum npm run test.firefox
else
yarn install
npx nightwatch ./test/mCaptcha.ts

View File

@@ -31,7 +31,6 @@ pub struct CreateCaptcha {
pub levels: Vec<Level>,
pub duration: u32,
pub description: String,
pub publish_benchmarks: bool,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
@@ -53,11 +52,6 @@ pub async fn create(
) -> ServiceResult<impl Responder> {
let username = id.identity().unwrap();
let mcaptcha_config = runner::create(&payload, &data, &username).await?;
if payload.publish_benchmarks {
data.db
.analytics_create_psuedo_id_if_not_exists(&mcaptcha_config.key)
.await?;
}
Ok(HttpResponse::Ok().json(mcaptcha_config))
}

View File

@@ -60,9 +60,6 @@ pub struct TrafficPatternRequest {
pub broke_my_site_traffic: Option<u32>,
/// Captcha description
pub description: String,
/// publish benchmarks
pub publish_benchmarks: bool,
}
impl From<&TrafficPatternRequest> for TrafficPattern {
@@ -130,14 +127,12 @@ async fn create(
levels,
duration: data.settings.captcha.default_difficulty_strategy.duration,
description: payload.description,
publish_benchmarks: payload.publish_benchmarks,
};
let mcaptcha_config = create_runner(&msg, &data, &username).await?;
data.db
.add_traffic_pattern(&username, &mcaptcha_config.key, &pattern)
.await?;
Ok(HttpResponse::Ok().json(mcaptcha_config))
}
@@ -167,7 +162,6 @@ async fn update(
duration: data.settings.captcha.default_difficulty_strategy.duration,
description: payload.pattern.description,
key: payload.key,
publish_benchmarks: payload.pattern.publish_benchmarks,
};
update_captcha_runner(&msg, &data, &username).await?;
@@ -298,7 +292,6 @@ pub mod tests {
peak_sustainable_traffic: 1_000_000,
broke_my_site_traffic: Some(10_000_000),
description: NAME.into(),
publish_benchmarks: false,
};
let default_levels = calculate(
@@ -330,11 +323,6 @@ pub mod tests {
assert_eq!(get_level_resp.status(), StatusCode::OK);
let res_levels: Vec<Level> = test::read_body_json(get_level_resp).await;
assert_eq!(res_levels, default_levels);
assert!(!data
.db
.analytics_captcha_is_published(&token_key.key)
.await
.unwrap());
// END create_easy
// START update_easy
@@ -343,7 +331,6 @@ pub mod tests {
peak_sustainable_traffic: 10_000,
broke_my_site_traffic: Some(1_000_000),
description: NAME.into(),
publish_benchmarks: true,
};
let updated_default_values = calculate(
@@ -365,11 +352,6 @@ pub mod tests {
)
.await;
assert_eq!(update_token_resp.status(), StatusCode::OK);
assert!(data
.db
.analytics_captcha_is_published(&token_key.key)
.await
.unwrap());
let get_level_resp = test::call_service(
&app,
@@ -412,52 +394,5 @@ pub mod tests {
));
assert!(body.contains(&payload.pattern.avg_traffic.to_string()));
assert!(body.contains(&payload.pattern.peak_sustainable_traffic.to_string()));
// START update_easy to delete published results
let mut payload2 = TrafficPatternRequest {
avg_traffic: 100_000,
peak_sustainable_traffic: 1_000_000,
broke_my_site_traffic: Some(10_000_000),
description: NAME.into(),
publish_benchmarks: true,
};
let add_token_resp = test::call_service(
&app,
post_request!(&payload2, ROUTES.captcha.easy.create)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(add_token_resp.status(), StatusCode::OK);
assert!(data
.db
.analytics_captcha_is_published(&token_key.key)
.await
.unwrap());
let token_key2: MCaptchaDetails = test::read_body_json(add_token_resp).await;
payload2.publish_benchmarks = false;
let payload = UpdateTrafficPattern {
pattern: payload2,
key: token_key2.key.clone(),
};
let update_token_resp = test::call_service(
&app,
post_request!(&payload, ROUTES.captcha.easy.update)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(update_token_resp.status(), StatusCode::OK);
assert!(!data
.db
.analytics_captcha_is_published(&token_key2.key)
.await
.unwrap());
}
}

View File

@@ -82,7 +82,6 @@ pub async fn level_routes_work(data: ArcData) {
levels: levels.clone(),
description: add_level.description,
duration: add_level.duration,
publish_benchmarks: true,
};
let add_token_resp = test::call_service(

View File

@@ -76,7 +76,6 @@ pub struct UpdateCaptcha {
pub duration: u32,
pub description: String,
pub key: String,
pub publish_benchmarks: bool,
}
#[my_codegen::post(
@@ -140,16 +139,6 @@ pub mod runner {
e
);
}
if payload.publish_benchmarks {
data.db
.analytics_create_psuedo_id_if_not_exists(&payload.key)
.await?;
} else {
data.db
.analytics_delete_all_records_for_campaign(&payload.key)
.await?;
}
Ok(())
}
}

View File

@@ -109,8 +109,8 @@ pub async fn init_mcaptcha(data: &AppData, key: &str) -> ServiceResult<()> {
for level in levels.iter() {
let level = LevelBuilder::default()
.visitor_threshold(level.visitor_threshold)
.difficulty_factor(level.difficulty_factor)
.visitor_threshold(level.visitor_threshold as u32)
.difficulty_factor(level.difficulty_factor as u32)
.unwrap()
.build()
.unwrap();
@@ -250,7 +250,6 @@ pub mod tests {
levels: levels.into(),
duration: 30,
description: "dummy".into(),
publish_benchmarks: true,
};
// 1. add level
@@ -268,11 +267,11 @@ pub mod tests {
key: token_key.key.clone(),
};
let _url = V1_API_ROUTES.pow.get_config;
let url = V1_API_ROUTES.pow.get_config;
let mut prev = 0;
for (count, l) in levels.iter().enumerate() {
for _l in prev..l.visitor_threshold * 2 {
let _get_config_resp = test::call_service(
for l in prev..l.visitor_threshold * 2 {
let get_config_resp = test::call_service(
&app,
post_request!(&get_config_payload, V1_API_ROUTES.pow.get_config)
.to_request(),

View File

@@ -32,27 +32,6 @@ pub struct ValidationToken {
pub token: String,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ApiWork {
pub string: String,
pub result: String,
pub nonce: u64,
pub key: String,
pub time: Option<u32>,
pub worker_type: Option<String>,
}
impl From<ApiWork> for Work {
fn from(value: ApiWork) -> Self {
Self {
string: value.string,
nonce: value.nonce,
result: value.result,
key: value.key,
}
}
}
// API keys are mcaptcha actor names
/// route handler that verifies PoW and issues a solution token
@@ -60,7 +39,7 @@ impl From<ApiWork> for Work {
#[my_codegen::post(path = "V1_API_ROUTES.pow.verify_pow()")]
pub async fn verify_pow(
req: HttpRequest,
payload: web::Json<ApiWork>,
payload: web::Json<Work>,
data: AppData,
) -> ServiceResult<impl Responder> {
#[cfg(not(test))]
@@ -73,19 +52,8 @@ pub async fn verify_pow(
let ip = "127.0.1.1".into();
let key = payload.key.clone();
let payload = payload.into_inner();
let worker_type = payload.worker_type.clone();
let time = payload.time;
let (res, difficulty_factor) = data.captcha.verify_pow(payload.into(), ip).await?;
let res = data.captcha.verify_pow(payload.into_inner(), ip).await?;
data.stats.record_solve(&data, &key).await?;
if time.is_some() && worker_type.is_some() {
let analytics = db_core::CreatePerformanceAnalytics {
difficulty_factor,
time: time.unwrap(),
worker_type: worker_type.unwrap(),
};
data.db.analysis_save(&key, &analytics).await?;
}
let payload = ValidationToken { token: res };
Ok(HttpResponse::Ok().json(payload))
}
@@ -113,81 +81,6 @@ pub mod tests {
verify_pow_works(data).await;
}
#[actix_rt::test]
async fn verify_analytics_pow_works_pg() {
let data = crate::tests::pg::get_data().await;
verify_analytics_pow_works(data).await;
}
#[actix_rt::test]
async fn verify_analytics_pow_works_maria() {
let data = crate::tests::maria::get_data().await;
verify_analytics_pow_works(data).await;
}
pub async fn verify_analytics_pow_works(data: ArcData) {
const NAME: &str = "powanalyticsuser";
const PASSWORD: &str = "testingpas";
const EMAIL: &str = "powanalyticsuser@a.com";
let data = &data;
delete_user(data, NAME).await;
register_and_signin(data, NAME, EMAIL, PASSWORD).await;
let (_, _signin_resp, token_key) = add_levels_util(data, NAME, PASSWORD).await;
let app = get_app!(data).await;
let get_config_payload = GetConfigPayload {
key: token_key.key.clone(),
};
// update and check changes
let get_config_resp = test::call_service(
&app,
post_request!(&get_config_payload, V1_API_ROUTES.pow.get_config)
.to_request(),
)
.await;
assert_eq!(get_config_resp.status(), StatusCode::OK);
let config: PoWConfig = test::read_body_json(get_config_resp).await;
let pow = pow_sha256::ConfigBuilder::default()
.salt(config.salt)
.build()
.unwrap();
let work = pow
.prove_work(&config.string.clone(), config.difficulty_factor)
.unwrap();
let work = ApiWork {
string: config.string.clone(),
result: work.result,
nonce: work.nonce,
key: token_key.key.clone(),
time: Some(100),
worker_type: Some("wasm".into()),
};
let pow_verify_resp = test::call_service(
&app,
post_request!(&work, V1_API_ROUTES.pow.verify_pow).to_request(),
)
.await;
assert_eq!(pow_verify_resp.status(), StatusCode::OK);
let limit = 50;
let offset = 0;
let mut analytics = data
.db
.analytics_fetch(&token_key.key, limit, offset)
.await
.unwrap();
assert_eq!(analytics.len(), 1);
let a = analytics.pop().unwrap();
assert_eq!(a.time, work.time.unwrap());
assert_eq!(a.worker_type, work.worker_type.unwrap());
}
pub async fn verify_pow_works(data: ArcData) {
const NAME: &str = "powverifyusr";
const PASSWORD: &str = "testingpas";
@@ -236,12 +129,6 @@ pub mod tests {
)
.await;
assert_eq!(pow_verify_resp.status(), StatusCode::OK);
assert!(data
.db
.analytics_fetch(&token_key.key, 50, 0)
.await
.unwrap()
.is_empty());
let string_not_found = test::call_service(
&app,

View File

@@ -83,11 +83,7 @@ impl SystemGroup {
enum_system_wrapper!(get_pow, String, CaptchaResult<Option<PoWConfig>>);
// utility function to verify [Work]
pub async fn verify_pow(
&self,
msg: Work,
ip: String,
) -> CaptchaResult<(String, u32)> {
pub async fn verify_pow(&self, msg: Work, ip: String) -> CaptchaResult<String> {
match self {
Self::Embedded(val) => val.verify_pow(msg, ip).await,
Self::Redis(val) => val.verify_pow(msg, ip).await,
@@ -207,9 +203,9 @@ impl Data {
};
let stats: Box<dyn Stats> = if s.captcha.enable_stats {
Box::<Real>::default()
Box::new(Real::default())
} else {
Box::<Dummy>::default()
Box::new(Dummy::default())
};
let data = Data {

View File

@@ -132,7 +132,7 @@ mod tests {
let duration = Duration::from_secs(DURATION);
// register works
DemoUser::register_demo_user(&data).await.unwrap();
let _ = DemoUser::register_demo_user(&data).await.unwrap();
let payload = AccountCheckPayload {
val: DEMO_USER.into(),
};

View File

@@ -35,22 +35,15 @@ struct AdvanceEditPage {
name: String,
key: String,
levels: Vec<Level>,
publish_benchmarks: bool,
}
impl AdvanceEditPage {
fn new(
config: Captcha,
levels: Vec<Level>,
key: String,
publish_benchmarks: bool,
) -> Self {
fn new(config: Captcha, levels: Vec<Level>, key: String) -> Self {
AdvanceEditPage {
duration: config.duration as u32,
name: config.description,
levels,
key,
publish_benchmarks,
}
}
}
@@ -70,9 +63,8 @@ pub async fn advance(
let config = data.db.get_captcha_config(&username, &key).await?;
let levels = data.db.get_captcha_levels(Some(&username), &key).await?;
let publish_benchmarks = data.db.analytics_captcha_is_published(&key).await?;
let body = AdvanceEditPage::new(config, levels, key, publish_benchmarks)
let body = AdvanceEditPage::new(config, levels, key)
.render_once()
.unwrap();
Ok(HttpResponse::Ok()
@@ -114,14 +106,11 @@ pub async fn easy(
match data.db.get_traffic_pattern(&username, &key).await {
Ok(c) => {
let config = data.db.get_captcha_config(&username, &key).await?;
let publish_benchmarks =
data.db.analytics_captcha_is_published(&key).await?;
let pattern = TrafficPatternRequest {
peak_sustainable_traffic: c.peak_sustainable_traffic,
avg_traffic: c.avg_traffic,
broke_my_site_traffic: c.broke_my_site_traffic.map(|n| n),
peak_sustainable_traffic: c.peak_sustainable_traffic as u32,
avg_traffic: c.avg_traffic as u32,
broke_my_site_traffic: c.broke_my_site_traffic.map(|n| n as u32),
description: config.description,
publish_benchmarks,
};
let page = EasyEditPage::new(key, pattern).render_once().unwrap();

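The edit-page hunks above differ by a publish_benchmarks flag (read via data.db.analytics_captcha_is_published(&key)) and by whether the traffic numbers are cast to u32 before being placed in TrafficPatternRequest. Purely as an illustration of the resulting form data, here is an assumed shape for that request: the struct and field names come from the hunk, while the integer widths and the serde derive are guesses, not the crate's definition.

// Illustrative only: an assumed shape for the traffic-pattern form data.
// Integer widths are guesses; only the field names are grounded in the diff.
use serde::Serialize;

#[derive(Serialize)]
struct TrafficPatternRequest {
    avg_traffic: i32,
    peak_sustainable_traffic: i32,
    broke_my_site_traffic: Option<i32>,
    description: String,
    publish_benchmarks: bool,
}

fn main() {
    let pattern = TrafficPatternRequest {
        avg_traffic: 500,
        peak_sustainable_traffic: 5_000,
        broke_my_site_traffic: None,
        description: "my-site".into(),
        publish_benchmarks: true,
    };
    // Requires the serde and serde_json crates, which the project already uses.
    println!("{}", serde_json::to_string_pretty(&pattern).unwrap());
}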
View File

@@ -36,7 +36,6 @@ struct IndexPage {
key: String,
levels: Vec<Level>,
stats: CaptchaStats,
publish_benchmarks: bool,
}
impl IndexPage {
@@ -45,7 +44,6 @@ impl IndexPage {
config: Captcha,
levels: Vec<Level>,
key: String,
publish_benchmarks: bool,
) -> Self {
IndexPage {
duration: config.duration as u32,
@@ -53,7 +51,6 @@ impl IndexPage {
levels,
key,
stats,
publish_benchmarks,
}
}
}
@@ -73,9 +70,8 @@ pub async fn view_sitekey(
let config = data.db.get_captcha_config(&username, &key).await?;
let levels = data.db.get_captcha_levels(Some(&username), &key).await?;
let stats = data.stats.fetch(&data, &username, &key).await?;
let publish_benchmarks = data.db.analytics_captcha_is_published(&key).await?;
let body = IndexPage::new(stats, config, levels, key, publish_benchmarks)
let body = IndexPage::new(stats, config, levels, key)
.render_once()
.unwrap();
Ok(HttpResponse::Ok()

View File

@@ -19,7 +19,7 @@ use std::{env, fs};
use config::{Config, ConfigError, Environment, File};
use derive_more::Display;
use log::{debug, warn};
use serde::{Deserialize, Serialize};
use url::Url;
@@ -191,7 +191,7 @@ impl Settings {
.unwrap();
log::info!("Overriding [database].url and [database].database_type with environment variable");
}
Err(_e) => {
Err(e) => {
set_database_url(&mut s);
}
}

View File

@@ -52,8 +52,8 @@ pub mod pg {
settings.captcha.runners = Some(1);
settings.database.url = url.clone();
settings.database.database_type = DBType::Postgres;
Data::new(&settings).await
let data = Data::new(&settings).await;
data
}
}
pub mod maria {
@@ -71,8 +71,8 @@ pub mod maria {
settings.captcha.runners = Some(1);
settings.database.url = url.clone();
settings.database.database_type = DBType::Maria;
Data::new(&settings).await
let data = Data::new(&settings).await;
data
}
}
//pub async fn get_data() -> ArcData {
@@ -118,7 +118,7 @@ macro_rules! get_app {
.wrap(actix_middleware::NormalizePath::new(
actix_middleware::TrailingSlash::Trim,
))
.configure($crate::routes::services),
.configure(crate::routes::services),
)
};
($data:expr) => {
@@ -262,6 +262,5 @@ pub fn get_level_data() -> CreateCaptcha {
levels,
duration: 30,
description: "dummy".into(),
publish_benchmarks: false,
}
}

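Two small points in the test-helper hunks above: the pg and maria constructors differ only by an intermediate let binding around Data::new(&settings).await, and the get_app! macro differs in $crate::routes::services versus crate::routes::services, while get_level_data() differs by a publish_benchmarks: false field on CreateCaptcha. The toy macro below (illustrative names only, not project code) shows why $crate is the hygienic form: it resolves to the crate that defines the macro no matter where the macro is invoked.

// Minimal sketch of the $crate hygiene point. Names are made up.
pub mod routes {
    pub fn services() -> &'static str {
        "configured"
    }
}

#[macro_export]
macro_rules! get_app_sketch {
    () => {
        // $crate:: instead of crate:: keeps the path valid at any call site,
        // including other crates and integration tests.
        $crate::routes::services()
    };
}

fn main() {
    println!("{}", get_app_sketch!());
}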
View File

@@ -45,16 +45,5 @@
<. } .>
<. } .>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
name="publish_benchmarks"
id="publish_benchmarks"
/>
</label>
<button class="sitekey-form__submit" type="submit">Submit</button>
</form>

View File

@@ -38,13 +38,6 @@ export const addSubmitEventListener = (): void =>
const submit = async (e: Event) => {
e.preventDefault();
const PUBLISH_BENCHMARKS = <HTMLInputElement>(
FORM.querySelector("#publish_benchmarks")
);
const description = validateDescription(e);
const duration = validateDuration();
@@ -57,7 +50,6 @@ const submit = async (e: Event) => {
levels: levels,
duration,
description,
publish_benchmarks: PUBLISH_BENCHMARKS.checked,
};
console.debug(`[form submission] json payload: ${JSON.stringify(payload)}`);
console.debug(`[form submission] json payload: ${JSON.stringify(payload)}`);

View File

@@ -23,7 +23,6 @@
/>
</label>
<label class="sitekey-form__label" for="avg_traffic">
Average Traffic of your website
<input
@@ -39,6 +38,7 @@
</label>
<label class="sitekey-form__label" for="avg_traffic">
Maximum traffic that your website can handle
<input
@@ -68,17 +68,5 @@
/>
</label>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
name="publish_benchmarks"
id="publish_benchmarks"
/>
</label>
<button class="sitekey-form__submit" type="submit">Submit</button>
</form>

View File

@@ -42,7 +42,6 @@ type TrafficPattern = {
peak_sustainable_traffic: number;
broke_my_site_traffic?: number;
description: string;
publish_benchmarks: boolean;
};
export const validate = (e: Event): TrafficPattern => {
@@ -50,7 +49,9 @@ export const validate = (e: Event): TrafficPattern => {
let broke_is_set = false;
const AVG_TRAFFIC = <HTMLInputElement>FORM.querySelector("#avg_traffic");
const AVG_TRAFFIC = <HTMLInputElement>(
FORM.querySelector("#avg_traffic")
);
const PEAK_TRAFFIC = <HTMLInputElement>(
FORM.querySelector("#peak_sustainable_traffic")
);
@@ -58,10 +59,6 @@ export const validate = (e: Event): TrafficPattern => {
FORM.querySelector("#broke_my_site_traffic")
);
const PUBLISH_BENCHMARKS = <HTMLInputElement>(
FORM.querySelector("#publish_benchmarks")
);
isBlankString(AVG_TRAFFIC.value, avg_traffic_name);
isBlankString(PEAK_TRAFFIC.value, peak_traffic_name);
@@ -104,7 +101,6 @@ export const validate = (e: Event): TrafficPattern => {
peak_sustainable_traffic,
broke_my_site_traffic,
description,
publish_benchmarks: PUBLISH_BENCHMARKS.checked,
};
return payload;

View File

@@ -16,22 +16,6 @@
<. } .>
<. let level = levels.len() + 1; .>
<. include!("../add/advance/add-level.html"); .>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
id="publish_benchmarks"
name="publish_benchmarks"
<. if publish_benchmarks { .>
checked
<. }.>
/>
</label>
<button data-sitekey="<.= key .>"
id="sitekey-form__submit" class="sitekey-form__submit" type="submit">
Submit

View File

@@ -61,21 +61,6 @@
/>
</label>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
id="publish_benchmarks"
name="publish_benchmarks"
<. if pattern.publish_benchmarks { .>
checked
<. }.>
/>
</label>
<button data-sitekey="<.= key .>" class="sitekey-form__submit" type="submit">
Submit
</button>

View File

@@ -47,19 +47,11 @@ const submit = async (e: Event) => {
const key = BTN.get().dataset.sitekey;
const PUBLISH_BENCHMARKS = <HTMLInputElement>(
Add.FORM.querySelector("#publish_benchmarks")
);
const payload = {
levels,
duration,
description,
key,
publish_benchmarks: PUBLISH_BENCHMARKS.checked,
};
console.debug(`[form submission] json payload: ${JSON.stringify(payload)}`);

View File

@@ -19,6 +19,7 @@
<label class="sitekey-form__level-label" for="difficulty<.= num .>">
Difficulty
<input
readonly="readonly"
type="number"
id="difficulty<.= num .>"
class="sitekey-form__level-input"

View File

@@ -23,23 +23,6 @@
<. for (count, level) in levels.iter().enumerate() { .>
<. include!("./existing-level.html"); .>
<. } .>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
id="publish_benchmarks"
readonly="readonly"
name="publish_benchmarks"
<. if publish_benchmarks { .>
checked
<. }.>
/>
</label>
<./* synchronise with "./__form-bottom.html" Lines below should break form */.>
</form>
<. include!("./stats.html"); .>

View File

@@ -55,7 +55,7 @@ export const solveCaptchaRunner = async (e: Event): Promise<void> => {
worker.onmessage = async (event: MessageEvent) => {
const resp: ServiceWorkerWork = event.data;
console.log(
`Proof generated. Difficulty: ${config.difficulty_factor} Duration: ${resp.work.time}`
`Proof generated. Difficulty: ${config.difficulty_factor} Duration: ${resp.duration}`
);
const proof: Work = {
@@ -63,8 +63,6 @@ export const solveCaptchaRunner = async (e: Event): Promise<void> => {
string: config.string,
nonce: resp.work.nonce,
result: resp.work.result,
time: Math.trunc(resp.work.time),
worker_type: resp.work.worker_type,
};
// 3. submit work

View File

@@ -11,66 +11,31 @@
import { gen_pow } from "@mcaptcha/pow-wasm";
import * as p from "@mcaptcha/pow_sha256-polyfill";
import { WasmWork, PoWConfig, SubmitWork } from "./types";
import { WasmWork, PoWConfig } from "./types";
/**
* prove work
* @param {PoWConfig} config - the proof-of-work configuration from which
* the work is computed
* */
const prove = async (config: PoWConfig): Promise<SubmitWork> => {
const WASM = "wasm";
const JS = "js";
if (WasmSupported) {
const prove = async (config: PoWConfig): Promise<WasmWork> => {
let proof: WasmWork = null;
let res: SubmitWork = null;
let time: number = null;
const t0 = performance.now();
if (WasmSupported) {
const proofString = gen_pow(
config.salt,
config.string,
config.difficulty_factor
);
const t1 = performance.now();
time = t1 - t0;
proof = JSON.parse(proofString);
const worker_type = WASM;
res = {
result: proof.result,
nonce: proof.nonce,
worker_type,
time,
};
return res;
} else {
console.log("WASM unsupported, expect delay during proof generation");
let proof: WasmWork = null;
let time: number = null;
let res: SubmitWork = null;
const t0 = performance.now();
proof = await p.generate_work(
config.salt,
config.string,
config.difficulty_factor
);
const t1 = performance.now();
time = t1 - t0;
const worker_type = JS;
res = {
result: proof.result,
nonce: proof.nonce,
worker_type,
time,
};
return res;
}
return proof;
};
// credits: @jf-bastien on Stack Overflow

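In the prove() hunk above, one side measures proof generation with performance.now() inside prove() and tags the result with a worker_type of "wasm" or "js", while the other returns a bare WasmWork and leaves the timing to the worker. The Rust snippet below is not part of the codebase; it only restates the measure-around-the-work pattern with std::time::Instant, with fake_pow standing in for gen_pow / generate_work.

// Not project code: measure the duration of an expensive computation and
// report it alongside the result, the way the TypeScript prove() does with
// performance.now().
use std::time::Instant;

fn main() {
    // Stand-in busy computation; the real work is the SHA256 proof-of-work.
    let fake_pow = |difficulty: u64| -> u64 {
        (0..difficulty).fold(0u64, |acc, n| acc.wrapping_add(n))
    };

    let start = Instant::now();
    let nonce = fake_pow(5_000_000);
    let time = start.elapsed().as_millis(); // reported back as `time`

    println!("nonce={nonce}, computed in {time}ms");
}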
View File

@@ -19,9 +19,15 @@ onmessage = async (e) => {
console.debug("message received at worker");
const config: PoWConfig = e.data;
const t0 = performance.now();
const work = await prove(config);
const t1 = performance.now();
const duration = t1 - t0;
const res: ServiceWorkerWork = {
work,
duration,
};
postMessage(res);

View File

@@ -14,15 +14,6 @@ export type Work = {
nonce: number;
string: string;
key: string;
time: number;
worker_type: string;
};
export type SubmitWork = {
time: number;
worker_type: string;
result: string;
nonce: number;
};
export type WasmWork = {
@@ -31,7 +22,8 @@ export type WasmWork = {
};
export type ServiceWorkerWork = {
work: SubmitWork;
work: WasmWork;
duration: number;
};
export type PoWConfig = {

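One side of the types hunk above carries time and worker_type on Work plus a SubmitWork type, matching the JSON the test near the top of this diff posts to the verification endpoint. As a sketch only, this is the kind of serde struct the server side would deserialize that payload into; the struct name, field types, and the optionality of the new fields are assumptions based on that test, not the crate's actual definition.

// Assumed server-side view of the extended Work payload. Field list follows
// the test earlier in this diff (string, result, nonce, key, time, worker_type).
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct WorkPayload {
    string: String,
    result: String,
    nonce: u64,
    key: String,
    // Newer clients report how long proof generation took and which worker
    // ("wasm" or the "js" polyfill) produced it; older clients may omit these.
    time: Option<u32>,
    worker_type: Option<String>,
}

fn main() {
    let json = r#"{
        "string": "challenge-string",
        "result": "deadbeef",
        "nonce": 42,
        "key": "site-key",
        "time": 100,
        "worker_type": "wasm"
    }"#;
    let work: WorkPayload = serde_json::from_str(json).unwrap();
    println!("{:?}", work);
}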
5974
yarn.lock

File diff suppressed because it is too large Load Diff