This commit is contained in:
Fritz Schmid 2022-06-15 13:25:18 +02:00
parent 73dd812daa
commit efafddbca9
11 changed files with 3172 additions and 0 deletions

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
target
.code

2177
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

37
Cargo.toml Normal file
View File

@ -0,0 +1,37 @@
[package]
name = "ophe"
version = "0.1.0"
authors = ["fritz <fritz.schmid@fau.de>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
bls12_381 = {features =[ "groups","pairings","alloc"],git="https://github.com/Kradxn/bls12_381"}
#bls12_381 = "*"
rand = "*"
bencher = "*"
bit-vec = "*"
lazy_static = "*"
group = "0.12.0"
rand_core= {version = "0.6", features = ["getrandom"]}
bitvec = "=0.22.0"
sha2 = "*"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
time = "*"
rocket = "0.5.0-rc.1"
rayon = "*"
base64 = "*"
serde_with = "*"
reqwest ={ version = "*", features = ["json"]}
rocket_okapi = { version = "0.8.0-alpha-1", features = ["swagger", "rapidoc"] }
[[bin]]
name = "cryptoservice"
path = "src/cryptoservice.rs"
[[bin]]
name = "ophe"
path = "src/ophe.rs"

29
README.md Normal file
View File

@ -0,0 +1,29 @@
# OPHE Test Implementation
## Requirements
Rust 1.59.0+ stable
# Running the cryptoservice
```ROCKET_PORT=9999 cargo run --release --bin cryptoservice```
# Running the webclient
A rudimentary webclient is available. Providing a custom message for encryption is currently not possible (for now, 32 bytes of value 0 are encrypted).
```cargo run --release --bin ophe```
Go to http://localhost:8000/rapidoc to checkout the documentation.
# Benchmarking
Benchmark can be run by ```cargo test --release -- --nocapture```
It should be run with --release; otherwise performance optimizations are not applied.
Every performance test can be edited to specify the number of threads used by changing 1 in
``` rayon::ThreadPoolBuilder::new().num_threads(1).build_global().unwrap(); ``` to the number of threads you want to use (meaning multiple CPU cores can be used)
Tested with Rust 1.59.0 stable
## Todo
- Split core functionality from the webclient code into 2 separate crates

279
src/core.rs Normal file
View File

@ -0,0 +1,279 @@
use bls12_381::*;
use crate::utils;
use crate::proofs;
use std::collections::HashMap;
use std::time::Instant;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use rocket_okapi::okapi::schemars;
use rocket_okapi::okapi::schemars::JsonSchema;
use crate::serializers;
use serde_with::serde_as;
#[serde_as]
#[derive(Deserialize, Serialize, JsonSchema)]
/// Public parameters of the OPHE scheme, shared between server,
/// ratelimiter and clients.
pub struct PublicParameters {
    /// The message space: one scalar per possible k-bit chunk value
    /// (built by `utils::prepare_messages_to_space`).
    #[serde_as(as = "Vec<serializers::SerializeScalar>")]
    #[schemars(with = "Vec::<String>")]
    pub space: Vec<Scalar>,
    /// Chunk width in bits; plaintexts are split into k-bit pieces.
    pub k: usize,
    /// Generator of the target group Gt used for keys and proofs.
    #[serde_as(as = "serializers::SerializeGt")]
    #[schemars(with = "String")]
    pub gt_gen: Gt,
    /// One ratelimiter public key per message chunk.
    #[serde_as(as = "Vec<serializers::SerializeGt>")]
    #[schemars(with = "Vec::<String>")]
    pub ratelimiter_public_keys: Vec<Gt>
}
/// Per-protocol-run server state (one instance per encryption/decryption).
pub struct ServerState {
    /// Server-side blinding factor; its inverse unblinds the ratelimiter's
    /// responses in `phe_enc_finish` / `phe_dec_finish_*`. Never leaves the server.
    r: Scalar,
    /// Public nonce of this run; included in the `RatelimiterRequest`.
    pub n: Scalar,
    /// Blinded password hash: H2(username || password) * r.
    p: G2Projective
}
impl ServerState {
    /// Creates a fresh state with a random nonce `n`.
    ///
    /// The hashing/blinding logic lives in `from_nonce`; this wrapper only
    /// supplies a random nonce, removing the previous code duplication.
    fn new(username: &String, password: &String) -> ServerState {
        Self::from_nonce(username, password, &utils::random_scalar())
    }

    /// Like `new`, but re-uses an externally supplied nonce (needed for
    /// decryption, where the nonce must match the one used at encryption).
    fn from_nonce(username: &String, password: &String, nonce: &Scalar) -> ServerState {
        // Fresh blinding factor for this protocol run.
        let r = utils::random_scalar();
        // Hash username||password into G2 and blind the result with r.
        let to_be_hashed = username.to_owned() + password;
        let h2 = G2Affine::generator() * utils::hash_string_to_scalar(to_be_hashed);
        let p = h2 * r;
        ServerState { r, n: nonce.clone(), p }
    }
}
#[serde_as]
#[derive(Serialize, Deserialize, JsonSchema)]
/// Request sent from the server to the ratelimiter:
/// the public nonce plus the blinded password hash.
pub struct RatelimiterRequest {
    /// Public nonce n of this protocol run.
    #[serde_as(as = "serializers::SerializeScalar")]
    #[schemars(with = "String")]
    n: Scalar,
    /// Blinded password hash p = H2(username || password) * r.
    #[serde_as(as = "serializers::SerializeG2")]
    #[schemars(with = "String")]
    p: G2Projective,
}
#[serde_as]
#[derive(Serialize, Deserialize, JsonSchema)]
/// Ratelimiter's answer: one element (value plus proof) per message chunk.
pub struct RatelimiterResponse {
    elements: Vec<RatelimiterResponseElement>
}
#[serde_as]
#[derive(Serialize, Deserialize, JsonSchema)]
/// One entry of a `RatelimiterResponse`.
pub struct RatelimiterResponseElement {
    /// Proof that `p` was computed with the private key matching the
    /// corresponding entry of `ratelimiter_public_keys`.
    proof: proofs::SameDLogProof,
    /// The ratelimiter's keyed value: u scaled by the chunk's private key
    /// (see `phe_ratelimiter`).
    #[serde_as(as = "serializers::SerializeGt")]
    #[schemars(with = "String")]
    p: Gt
}
/// Server side: start an encryption run for the given credentials.
/// Returns the local state plus the request to forward to the ratelimiter.
pub fn phe_init(username: &String, password: &String) -> (ServerState, RatelimiterRequest) {
    let state = ServerState::new(username, password);
    let request = RatelimiterRequest { n: state.n, p: state.p };
    (state, request)
}
/// Server side: start a decryption run, re-using the nonce `n` from the
/// original encryption so the ratelimiter produces matching responses.
pub fn phe_init_decrypt(username: &String, password: &String, n: &Scalar) -> (ServerState, RatelimiterRequest) {
    let state = ServerState::from_nonce(username, password, n);
    let request = RatelimiterRequest { n: state.n, p: state.p };
    (state, request)
}
/// Ratelimiter side: compute u = e(g1 * n, p), scale it by every private
/// key, and attach a same-discrete-log proof for each result.
pub fn phe_ratelimiter(private_keys: &Vec<Scalar>, request: &RatelimiterRequest, pp: &PublicParameters) -> RatelimiterResponse {
    let u = pairing(
        &G1Affine::from(G1Affine::generator() * request.n),
        &G2Affine::from(request.p),
    );
    // One keyed value + proof per chunk, computed in parallel.
    let elements = private_keys
        .par_iter()
        .zip(&pp.ratelimiter_public_keys)
        .map(|(secret_key, public_key)| {
            let value = u * secret_key;
            let statement = proofs::SameDLogProofPublic { g: pp.gt_gen, h: u, y1: *public_key, y2: value };
            RatelimiterResponseElement { proof: statement.proof(secret_key), p: value }
        })
        .collect::<Vec<_>>();
    RatelimiterResponse { elements }
}
/// Server side, step 2 of encryption: verify the ratelimiter's proofs and
/// combine its responses with the split message into the final ciphertext.
///
/// Returns one Gt element per k-bit message chunk, or an error if any
/// proof fails to verify.
pub fn phe_enc_finish(msg: &Vec<u8>, pp: &PublicParameters, response: &RatelimiterResponse, ss: &ServerState) -> Result<Vec<Gt>, String> {
    let split_message = utils::split_message_into_space(msg, &pp.space, pp.k);
    // One ratelimiter key pair is required per message chunk.
    assert_eq!(split_message.len(), pp.ratelimiter_public_keys.len(), "key length {} and msglength {} do not match", pp.ratelimiter_public_keys.len(), split_message.len());
    // Recompute u = e(g1 * n, p); must match what the ratelimiter used.
    let u = pairing(&G1Affine::from(G1Affine::generator() * ss.n), &G2Affine::from(ss.p));
    // r^-1 removes the server-side blinding from the responses.
    let r_inv = ss.r.invert().unwrap();
    let values = pp.ratelimiter_public_keys.par_iter().zip(&response.elements).zip(&split_message).map(|((public_key, x), msg)| {
        // Reject responses whose same-discrete-log proof does not check out.
        if !x.proof.verify(&proofs::SameDLogProofPublic { g: pp.gt_gen, h: u, y1: *public_key, y2: x.p }) {
            Err("Invalid Proof".to_string())
        } else {
            // Ciphertext chunk: unblinded response scaled by the message scalar.
            Ok(x.p * r_inv * msg)
        }
    }).collect::<Result<Vec<_>, String>>();
    values
}
/// Server side, step 2 of decryption: verify the ratelimiter's proofs,
/// unblind its responses and brute-force the per-chunk discrete logs
/// (via `utils::solve_dlog`) to recover the plaintext bytes.
pub fn phe_dec_finish_simple(ciphertext: &Vec<Gt>, pp: &PublicParameters, response: &RatelimiterResponse, ss: &ServerState) -> Result<Vec<u8>, String> {
    // Recompute u = e(g1 * n, p) for proof verification.
    let u = pairing(&G1Affine::from(G1Affine::generator() * ss.n), &G2Affine::from(ss.p));
    let r_inv = ss.r.invert().unwrap();
    let values = pp.ratelimiter_public_keys.par_iter().zip(&response.elements).map(|(public_key, x)| {
        // Reject elements with an invalid same-discrete-log proof.
        if !x.proof.verify(&proofs::SameDLogProofPublic { g: pp.gt_gen, h: u, y1: *public_key, y2: x.p }) {
            Err("Invalid Proof".to_string())
        } else {
            // Remove the server-side blinding.
            Ok(x.p * r_inv)
        }
    }).collect::<Result<Vec<_>, String>>()?;
    return Ok(utils::solve_dlog(&values, ciphertext, &pp.space, pp.k)?);
}
/// Like `phe_dec_finish_simple`, but resolves the per-chunk discrete logs
/// via precomputed lookup tables instead of a brute-force scan.
/// Panics (inside `find_in_precomputed_space`) if a table lookup misses.
fn phe_dec_finish_preparedspace(pp: &PublicParameters, response: &RatelimiterResponse, ss: &ServerState, preparedspace: &Vec<HashMap<[u8; 288], usize>>) -> Result<Vec<u8>, String> {
    // Recompute u = e(g1 * n, p) for proof verification.
    let u = pairing(&G1Affine::from(G1Affine::generator() * ss.n), &G2Affine::from(ss.p));
    let r_inv = ss.r.invert().unwrap();
    let values = pp.ratelimiter_public_keys.par_iter().zip(&response.elements).map(|(public_key, x)| {
        // Reject elements with an invalid same-discrete-log proof.
        if !x.proof.verify(&proofs::SameDLogProofPublic { g: pp.gt_gen, h: u, y1: *public_key, y2: x.p }) {
            Err("Invalid Proof".to_string())
        } else {
            // Remove the server-side blinding.
            Ok(x.p * r_inv)
        }
    }).collect::<Result<Vec<_>, String>>()?;
    return Ok(utils::find_in_precomputed_space(&values, &pp.space, preparedspace, pp.k));
}
#[test]
fn test_core(){
rayon::ThreadPoolBuilder::new().num_threads(1).build_global().unwrap();
test_core_k(32,2);
//test_core_k(32,4);
//test_core_k(32,8);
}
#[test]
fn test_speed_operations() {
let mut s1 = utils::random_scalar();
let s2 = utils::random_scalar();
let g11 = G1Affine::from(G1Affine::generator() * s1);
let g11p = G1Affine::generator() * s1;
let g12 = G1Affine::from(G1Affine::generator() * s2);
let g12p = G1Affine::generator() * s2;
let g21 = G2Affine::from(G2Affine::generator() * s1);
let g21p = G2Affine::generator() * s1;
let g22 = G2Affine::from(G2Affine::generator() * s2);
let g22p = G2Affine::generator() * s2;
let mut gt1 = pairing(&g11,&g21);
let gt2 = pairing(&g12,&g22);
let mut g1res = &g11p + &g12p;
let mut g2res = &g21p + &g22p;
let mut gtres = &gt1 + &gt2;
let start = Instant::now();
for _i in 0..100000{
s1 = s1 * s2;
}
println!(" {:.2?} scalar *",start.elapsed()/100000);
let start = Instant::now();
for _i in 0..100000{
s1 = s1 + s2;
}
println!(" {:.2?} scalar +",start.elapsed()/100000);
let start = Instant::now();
for _i in 0..1000{
gt1 = pairing(&g11,&g21);
}
println!(" {:.2?} pairing ",start.elapsed()/1000);
let start = Instant::now();
for _i in 0..1000000{
g1res = &g1res + &g12p;
}
println!(" {:.2?} g1 + ",start.elapsed()/1000000);
let start = Instant::now();
for _i in 0..1000000{
g2res = &g2res + &g22p;
}
println!(" {:.2?} g2 + ",start.elapsed()/1000000);
let start = Instant::now();
for _i in 0..1000000{
gtres = &gtres + &gt2;
}
println!(" {:.2?} gt + ",start.elapsed()/1000000);
let start = Instant::now();
for _i in 0..10000{
g1res = &g1res * s1;
}
println!(" {:.2?} g1 exp ",start.elapsed()/10000);
let start = Instant::now();
for _i in 0..1000{
g2res = &g2res * s1;
}
println!(" {:.2?} g2 exp ",start.elapsed()/1000);
let start = Instant::now();
for _i in 0..1000{
gtres = &gtres * s1;
}
println!(" {:.2?} gt exp ",start.elapsed()/1000);
}
/// Runs one full protocol round trip for a `bytes`-byte message with
/// chunk width `k`, printing per-phase timings.
fn test_core_k(bytes: usize, k: usize) {
    // Number of k-bit chunks per message (= number of key pairs needed).
    let keysize = bytes * 8 / k;
    // Deterministic test message spanning the full byte range.
    let msg = (0..keysize / 8 * k).map(|x| (255 * x / (keysize / 8 * k - 1)) as u8).collect::<Vec<_>>();
    let generator = utils::random_gt();
    let keys = (0..keysize).map(|_| utils::random_scalar()).collect::<Vec<_>>();
    let public_keys = keys.iter().map(|x| generator * x).collect::<Vec<_>>();
    let space = utils::prepare_messages_to_space(k);
    let pp = PublicParameters { space, ratelimiter_public_keys: public_keys, gt_gen: generator, k };
    // Encryption: init -> ratelimiter -> finish.
    let start = Instant::now();
    let (ss, request) = phe_init(&"test".to_string(), &"test".to_string());
    println!("k: {} {:.2?} phe_init", k, start.elapsed());
    let start = Instant::now();
    let response = phe_ratelimiter(&keys, &request, &pp);
    println!("k: {} {:.2?} phe_ratelimiter", k, start.elapsed());
    let start = Instant::now();
    let ciphertext = phe_enc_finish(&msg, &pp, &response, &ss).unwrap();
    println!("k: {} {:.2?} phe_enc_finish", k, start.elapsed());
    // Decryption via brute-force dlog must recover the message.
    let start = Instant::now();
    let expected = phe_dec_finish_simple(&ciphertext, &pp, &response, &ss).unwrap();
    println!("k: {} {:.2?} phe_dec_finish_simple", k, start.elapsed());
    assert_eq!(expected, msg);
    // Decryption via the precomputed lookup table must agree as well.
    let start = Instant::now();
    let prepared_space = ciphertext.iter().map(|x| utils::prepare_messages_to_precomputed_space(&pp.space, x)).collect::<Vec<_>>();
    println!("k: {} {:.2?} prepare_messages_to_precomputed_space", k, start.elapsed());
    let start = Instant::now();
    let expected = phe_dec_finish_preparedspace(&pp, &response, &ss, &prepared_space).unwrap();
    println!("k: {} {:.2?} phe_dec_finish_preparedspace", k, start.elapsed());
    assert_eq!(expected, msg);
}

85
src/cryptoservice.rs Normal file
View File

@ -0,0 +1,85 @@
#[macro_use] extern crate rocket;
extern crate bls12_381;
extern crate rand;
use rocket_okapi::settings::UrlObject;
use rocket_okapi::{openapi, openapi_get_routes, rapidoc::*, swagger_ui::*};
use rocket::serde::json::Json;
use rocket::Request;
use ophe::core::{RatelimiterRequest,RatelimiterResponse};
use ophe::core;
use ophe::utils;
use rocket::State;
use bls12_381::Scalar;
/// Generates the demo key material and public parameters:
/// 32-byte messages, k = 2 bit chunks, hence 128 ratelimiter key pairs.
fn make_public_parameters() -> (Vec<Scalar>, core::PublicParameters) {
    let bytes = 32;
    let k = 2;
    // One key pair per k-bit message chunk.
    let keysize = bytes * 8 / k;
    let generator = utils::random_gt();
    let mut keys = Vec::with_capacity(keysize);
    for _ in 0..keysize {
        keys.push(utils::random_scalar());
    }
    let public_keys: Vec<_> = keys.iter().map(|sk| generator * sk).collect();
    let space = utils::prepare_messages_to_space(k);
    let pp = core::PublicParameters { space, ratelimiter_public_keys: public_keys, gt_gen: generator, k };
    (keys, pp)
}
/// Entry point of the ratelimiter web service: generates key material once
/// at startup, then serves the API plus Swagger/RapiDoc UIs via Rocket.
#[rocket::main]
async fn main() {
    // Keys and public parameters are handed to Rocket as managed state,
    // retrievable in handlers via `&State<...>`.
    let (keys, pp) = make_public_parameters();
    let launch_result = rocket::build()
        .manage(pp)
        .manage(keys)
        .mount("/", openapi_get_routes![phe_help, get_public_parameters])
        // Swagger UI, driven by the generated openapi.json.
        .mount(
            "/swagger-ui/",
            make_swagger_ui(&SwaggerUIConfig {
                url: "../openapi.json".to_owned(),
                ..Default::default()
            }),
        )
        // RapiDoc as an alternative documentation frontend.
        .mount(
            "/rapidoc/",
            make_rapidoc(&RapiDocConfig {
                general: GeneralConfig {
                    spec_urls: vec![UrlObject::new("General", "../openapi.json")],
                    ..Default::default()
                },
                hide_show: HideShowConfig {
                    allow_spec_url_load: false,
                    allow_spec_file_load: false,
                    ..Default::default()
                },
                ..Default::default()
            }),
        )
        // Custom 422 handler for malformed JSON bodies.
        .register("/", catchers![serialize_failed])
        .launch()
        .await;
    match launch_result {
        Ok(_) => println!("Rocket shut down gracefully."),
        Err(err) => println!("Rocket had an error: {}", err),
    };
}
/// Ratelimiter endpoint: answers a blinded request with the keyed values
/// plus same-discrete-log proofs (see `core::phe_ratelimiter`).
#[openapi()]
#[post("/phe_help", format = "json", data = "<request>")]
fn phe_help(request: Json<RatelimiterRequest>, pp: &State<core::PublicParameters>, keys: &State<Vec<Scalar>>) -> Json<RatelimiterResponse> {
    Json(core::phe_ratelimiter(keys, &request, pp))
}
/// Returns the service's public parameters (message space, Gt generator,
/// chunk width and ratelimiter public keys) as JSON.
#[openapi()]
#[get("/get_public_parameters")]
fn get_public_parameters(pp: &State<core::PublicParameters>) -> Json<&core::PublicParameters> {
    Json(pp)
}
/// Catcher for HTTP 422: returned when a request body could not be
/// deserialized into the expected JSON structure.
#[catch(422)]
fn serialize_failed(_req: &Request) -> String {
    // `format!` with no interpolation is just `to_string` with extra steps
    // (clippy: useless_format).
    "Malformed Request".to_string()
}

6
src/lib.rs Normal file
View File

@ -0,0 +1,6 @@
#![allow(dead_code)]
mod proofs;
pub mod utils;
mod shamir;
pub mod core;
pub mod serializers;

97
src/proofs.rs Normal file
View File

@ -0,0 +1,97 @@
use bls12_381::*;
use crate::utils;
use crate::serializers;
use serde::{Deserialize, Serialize};
use rocket_okapi::okapi::schemars;
use rocket_okapi::okapi::schemars::JsonSchema;
use serde_with::serde_as;
#[cfg(test)]
use group::Group;
/// Public statement of a Chaum–Pedersen-style proof: `y1` and `y2` are
/// claimed to share the same secret exponent x over bases `g` and `h`
/// (Gt written additively: y1 = g * x, y2 = h * x).
pub struct SameDLogProofPublic {
    pub g: Gt,
    pub h: Gt,
    pub y1: Gt,
    pub y2: Gt,
}
impl SameDLogProofPublic {
    /// Builds the statement (g, h, g*x, h*x) for witness `x`.
    pub fn new(g: Gt, h: Gt, x: Scalar) -> Self {
        SameDLogProofPublic {
            g,
            h,
            y1: g * x,
            y2: h * x,
        }
    }
    /// Produces a non-interactive same-discrete-log proof for witness `x`.
    ///
    /// NOTE(review): the Fiat–Shamir challenge `c` is derived only from the
    /// statement (g, h, y1, y2) and NOT from the commitments `a`, `b`.
    /// Since the verifier checks `a == g*t + y1*c`, anyone can pick an
    /// arbitrary `t` and compute matching commitments without knowing `x` —
    /// the proof is forgeable as written. The challenge must also bind the
    /// commitments; this needs a coordinated fix of `proof` and `verify`.
    pub fn proof(&self, x: &Scalar) -> SameDLogProof {
        // Random commitment exponent.
        let r = utils::random_scalar();
        let c = utils::hash_gt_to_scalar(&[&self.g, &self.h, &self.y1, &self.y2]);
        // Schnorr-style response.
        let t = r - x * c;
        SameDLogProof {
            a: self.g * r,
            b: self.h * r,
            t,
        }
    }
}
#[serde_as]
#[derive(Eq, PartialEq, Debug, Serialize, Deserialize, JsonSchema)]
/// Transcript of a same-discrete-log proof:
/// commitments `a`, `b` and response `t`.
pub struct SameDLogProof {
    /// Commitment a = g * r.
    #[serde_as(as = "serializers::SerializeGt")]
    #[schemars(with = "String")]
    a: Gt,
    /// Commitment b = h * r.
    #[serde_as(as = "serializers::SerializeGt")]
    #[schemars(with = "String")]
    b: Gt,
    /// Response t = r - x * c.
    #[serde_as(as = "serializers::SerializeScalar")]
    #[schemars(with = "String")]
    t: Scalar,
}
impl SameDLogProof {
    /// Checks the proof against the public statement `pp`.
    ///
    /// NOTE(review): the challenge `c` is recomputed from the statement only
    /// and does not cover the commitments `a`, `b`. Because the check is
    /// `a == g*t + y1*c` (and likewise for `b`), a forger can choose any `t`
    /// and derive matching `a`, `b` without the witness. To be sound, the
    /// challenge must also bind `a` and `b`; fix together with `proof`.
    pub fn verify(&self, pp: &SameDLogProofPublic) -> bool {
        let c = utils::hash_gt_to_scalar(&[&pp.g, &pp.h, &pp.y1, &pp.y2]);
        self.a == (pp.g * self.t) + (pp.y1 * c) && self.b == (pp.h * self.t) + (pp.y2 * c)
    }
}
/// A proof generated with the correct witness must verify.
#[test]
fn test_proof_correct() {
    let witness = utils::random_scalar();
    let statement = SameDLogProofPublic::new(Gt::generator(), utils::random_gt(), witness);
    assert!(statement.proof(&witness).verify(&statement));
}
/// The statement is built from a different witness than the one used to
/// generate the proof, so verification must fail.
#[test]
fn test_proof_incorrect() {
    let wrong_witness = utils::random_scalar();
    let statement =
        SameDLogProofPublic::new(Gt::generator(), utils::random_gt(), utils::random_scalar());
    assert!(!statement.proof(&wrong_witness).verify(&statement));
}
/// A proof must survive a JSON round trip unchanged.
#[test]
fn test_proof_serialize() {
    let statement =
        SameDLogProofPublic::new(Gt::generator(), utils::random_gt(), utils::random_scalar());
    let proof = statement.proof(&utils::random_scalar());
    let json = serde_json::to_string(&proof).unwrap();
    let decoded: SameDLogProof = serde_json::from_str(&json).unwrap();
    assert_eq!(proof, decoded);
}

129
src/serializers.rs Normal file
View File

@ -0,0 +1,129 @@
use crate::serializers;
use bls12_381::*;
use serde::de::{Deserializer, Error};
use serde::ser::Serializer;
use serde::{Deserialize, Serialize};
use std::convert::TryInto;
use base64;
use serde_with::{SerializeAs,DeserializeAs};
use serde_with::serde_as;
pub struct SerializeScalar;
impl SerializeAs<Scalar> for SerializeScalar {
fn serialize_as<S>(g: &Scalar, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let base64 = base64::encode(g.to_bytes().to_vec());
String::serialize(&base64, serializer)
}
}
impl<'de> DeserializeAs<'de, Scalar> for SerializeScalar {
fn deserialize_as<D>(deserializer: D) -> Result<Scalar, D::Error>
where
D: Deserializer<'de>,
{
let base64 = String::deserialize(deserializer)?;
let s = base64::decode(base64.as_bytes()).map_err(|_x| Error::custom("Invalid Base64 string"))?;
let a: &[u8; 32];
a = s
.as_slice()
.try_into()
.map_err(|_x| Error::custom("Invalid Sized Array"))?;
let result = Scalar::from_bytes(a);
if result.is_none().into() {
Err(Error::custom("Invalid Scalar"))
} else {
Ok(result.unwrap())
}
}
}
pub struct SerializeGt;
impl SerializeAs<Gt> for SerializeGt {
fn serialize_as<S>(g: &Gt, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let base64 = base64::encode(g.to_uncompressed().to_vec());
String::serialize(&base64, serializer)
}
}
impl<'de> DeserializeAs<'de, Gt> for SerializeGt {
fn deserialize_as<D>(deserializer: D) -> Result<Gt, D::Error>
where
D: Deserializer<'de>,
{
let base64 = String::deserialize(deserializer)?;
let s = base64::decode(base64.as_bytes()).map_err(|_x| Error::custom("Invalid Base64 string"))?;
let a: &[u8; 576];
a = s.as_slice().try_into().map_err(|_x| Error::custom("Invalid Sized Array"))?;
let result = Gt::from_uncompressed(a);
if result.is_none().into() {
Err(Error::custom("Invalid Gt"))
} else {
Ok(result.unwrap())
}
}
}
pub struct SerializeG2;
impl SerializeAs<G2Projective> for SerializeG2 {
fn serialize_as<S>(g: &G2Projective, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let base64 = base64::encode(G2Affine::from(g).to_compressed().to_vec());
String::serialize(&base64, serializer)
}
}
impl<'de> DeserializeAs<'de, G2Projective> for SerializeG2 {
fn deserialize_as<D>(deserializer: D) -> Result<G2Projective, D::Error>
where
D: Deserializer<'de>,
{
let base64 = String::deserialize(deserializer)?;
let s = base64::decode(base64.as_bytes()).map_err(|_x| Error::custom("Invalid Base64 string"))?;
let a: &[u8; 96];
a = s.as_slice().try_into().map_err(|_x| Error::custom("Invalid Sized Array"))?;
let result = G2Affine::from_compressed(a);
if result.is_none().into() {
Err(Error::custom("Invalid G2 Element"))
} else {
Ok(result.unwrap().into())
}
}
}
#[serde_as]
#[derive(Eq, PartialEq, Debug, Serialize, Deserialize)]
/// Test-only wrapper used to exercise the `SerializeGt` adapter.
struct GtTest {
    #[serde_as(as = "serializers::SerializeGt")]
    g: Gt,
}
/// Round-trips a random Gt element through JSON via `SerializeGt`.
#[test]
fn test_gt() {
    let original = GtTest { g: crate::utils::random_gt() };
    let json = serde_json::to_string(&original).unwrap();
    println!("serialized = {}", json);
    let restored: GtTest = serde_json::from_str(&json).unwrap();
    println!("deserialized = {:?}", restored);
    assert_eq!(restored, original);
}

145
src/shamir.rs Normal file
View File

@ -0,0 +1,145 @@
use bls12_381::*;
use crate::utils;
/// Computes the Lagrange basis coefficient lambda_i(0) for node `i` out of
/// nodes at positions 1..=`shares`, used to interpolate at x = 0.
///
/// The magnitude is returned as a Scalar and the sign separately
/// (`true` = negative), since Scalar arithmetic is mod the group order.
/// NOTE(review): the coefficient is accumulated in f64 and rounded — this
/// is only exact while intermediate values stay within f64's integer range,
/// i.e. for small `shares`; confirm the intended parameter limits.
fn lambda(i: i64, shares: i64) -> (Scalar, bool) {
    assert!(i >= 0);
    assert!(shares > 0);
    assert!(shares >= i);
    let mut temp = 1.0;
    for j in 1..shares + 1 {
        if j != i {
            // Product of -j / (i - j) over all other nodes j.
            temp = temp * (-(j as f64) / ((i as f64) - (j as f64)));
        }
    }
    let scalar = Scalar::from(temp.round().abs() as u64);
    if temp < 0.0 {
        return (scalar, true);
    } else {
        return (scalar, false);
    }
}
/// Recovers the Gt secret from `shares` by Lagrange interpolation at 0,
/// using basis coefficients computed over all `n` nodes.
fn recover_shares(shares: &Vec<Gt>, n: i64) -> Gt {
    let mut acc = Gt::identity();
    for (idx, share) in shares.iter().enumerate() {
        // Coefficient magnitude plus sign flag (Scalar is mod-order only).
        let (coeff, negative) = lambda((idx + 1) as i64, n);
        let term = share * coeff;
        acc = acc + if negative { -term } else { term };
    }
    acc
}
/// Recovers the scalar secret from `shares` by Lagrange interpolation at 0,
/// using basis coefficients computed over all `n` nodes.
fn recover_shares_scalar(shares: &Vec<Scalar>, n: i64) -> Scalar {
    let mut acc = Scalar::zero();
    for (idx, share) in shares.iter().enumerate() {
        // Coefficient magnitude plus sign flag (Scalar is mod-order only).
        let (coeff, negative) = lambda((idx + 1) as i64, n);
        let term = share * coeff;
        acc = acc + if negative { -term } else { term };
    }
    acc
}
/// Evaluates the Gt-coefficient polynomial `poly` at the integer point `x`.
/// NOTE(review): `x.pow(j)` is computed in i64 and can overflow for large
/// points/degrees; fine for the small parameters used here.
fn eval_at(poly: &Vec<Gt>, x: i64) -> Gt {
    let mut acc = Gt::identity();
    for (j, coeff) in poly.iter().enumerate() {
        acc = acc + (coeff * Scalar::from((x.pow(j as u32)) as u64));
    }
    acc
}
/// Evaluates the scalar-coefficient polynomial `poly` at the integer point
/// `x`. Same i64 `pow` overflow caveat as `eval_at`.
fn eval_at_scalar(poly: &Vec<Scalar>, x: i64) -> Scalar {
    let mut acc = Scalar::zero();
    for (j, coeff) in poly.iter().enumerate() {
        acc = acc + (coeff * Scalar::from((x.pow(j as u32)) as u64));
    }
    acc
}
/// Splits `secret` into `n` Shamir shares with reconstruction threshold `t`:
/// a random polynomial of degree t-1 with the secret as constant term,
/// evaluated at x = 1..=n (never at 0, which would leak the secret).
fn gen_shares(secret: Gt, n: i64, t: i64) -> Vec<Gt> {
    assert!(t > 0);
    // The original asserted `n >= t` twice (once spelled `t <= n`); once is enough.
    assert!(n >= t);
    let mut poly = Vec::with_capacity(t as usize);
    poly.push(secret);
    // Remaining t-1 coefficients are random.
    (0..t - 1).for_each(|_| poly.push(utils::random_gt()));
    (0..n).map(|i| eval_at(&poly, i + 1)).collect()
}
/// Splits a scalar `secret` into `n` Shamir shares with threshold `t`:
/// a random polynomial of degree t-1 with the secret as constant term,
/// evaluated at x = 1..=n.
fn gen_shares_scalar(secret: Scalar, n: i64, t: i64) -> Vec<Scalar> {
    assert!(t > 0);
    // The original asserted `n >= t` twice (once spelled `t <= n`); once is enough.
    assert!(n >= t);
    let mut poly = Vec::with_capacity(t as usize);
    poly.push(secret);
    // Remaining t-1 coefficients are random.
    (0..t - 1).for_each(|_| poly.push(utils::random_scalar()));
    (0..n).map(|i| eval_at_scalar(&poly, i + 1)).collect()
}
#[cfg(test)]
fn test_shamir_scalar(t:i64,n:i64){
let scalar = Scalar::from(329183724587293224);
let scalarshares = gen_shares_scalar(scalar,n,t);
let gt = utils::random_scalar();
let shares = gen_shares_scalar(gt,n,t);
let gt2 = recover_shares_scalar(&shares[0..((t+1) as usize)].to_vec(),n);
assert_eq!(gt,gt2);
let mut newershares = Vec::new();
for i in 0..shares.len() {
newershares.push(shares[i]+scalarshares[i]);
}
let gt3 = recover_shares_scalar(&newershares[0..((t+1) as usize)].to_vec(),n);
assert_eq!(gt3,gt+scalar);
}
#[cfg(test)]
fn test_shamir_scalar_into_gt(t:i64,n:i64){
let scalar = utils::random_scalar();
let scalarshares = gen_shares_scalar(scalar,n,t);
let gt2 = recover_shares_scalar(&scalarshares[0..((t+1) as usize)].to_vec(),n);
assert_eq!(scalar,gt2);
let gt_elm1 = utils::random_gt();
let gt_shares1 :Vec<Gt> = scalarshares.iter().map(|x| gt_elm1 * x).collect();
let gt41 = recover_shares(&gt_shares1[0..((t+1) as usize)].to_vec(),n);
assert_eq!(gt_elm1*scalar,gt41);
}
/// Drives the Shamir tests with a 3-of-4 sharing.
#[test]
fn test() {
    let (threshold, num_shares) = (3, 4);
    test_shamir_scalar(threshold, num_shares);
    test_shamir_scalar_into_gt(threshold, num_shares);
}

186
src/utils.rs Normal file
View File

@ -0,0 +1,186 @@
extern crate time;
use group::Group;
use sha2::{Digest, Sha512};
use std::convert::TryInto;
use bls12_381::*;
use rand_core::{OsRng,RngCore};
use std::collections::HashMap;
use rayon::prelude::*;
#[cfg(test)]
use std::time::Instant;
/// Samples a uniformly random scalar from the OS RNG.
/// 64 bytes are drawn and reduced wide mod the group order, which avoids
/// the modulo bias a 32-byte draw would have.
pub fn random_scalar() -> Scalar {
    let mut buf = [0u8; 64];
    OsRng.fill_bytes(&mut buf);
    Scalar::from_bytes_wide(&buf)
}
/// Samples a uniformly random element of the target group Gt.
pub fn random_gt() -> Gt {
    Gt::random(OsRng)
}
/// Hashes a string to a scalar: SHA-512 over the UTF-8 bytes, reduced
/// wide mod the group order.
pub fn hash_string_to_scalar(element: String) -> Scalar {
    let mut hasher = Sha512::new();
    hasher.update(element.as_bytes());
    let digest: [u8; 64] = hasher.finalize().as_slice().try_into().expect("Wrong length");
    Scalar::from_bytes_wide(&digest)
}
/// Hashes a sequence of Gt elements to a scalar (used for Fiat–Shamir
/// challenges): SHA-512 over the concatenated compressed encodings,
/// reduced wide mod the group order.
pub fn hash_gt_to_scalar(elements: &[&Gt]) -> Scalar {
    let mut hasher = Sha512::new();
    elements.iter().for_each(|e| hasher.update(e.to_compressed()));
    let digest: [u8; 64] = hasher.finalize().as_slice().try_into().expect("Wrong length");
    Scalar::from_bytes_wide(&digest)
}
/// Builds the message space for chunk width `k`: the scalars 1..=2^k.
/// Values start at 1 so every space element is invertible (the precomputed
/// lookup table inverts them).
pub fn prepare_messages_to_space(k: usize) -> Vec<Scalar> {
    (0..1 << k)
        .map(|j| Scalar::from_raw([(j + 1) as u64, 0, 0, 0]))
        .collect()
}
/// Precomputes a decryption lookup table for one ciphertext element:
/// maps the compressed encoding of `cipher * m^-1` to the index of `m`
/// in `space`.
pub fn prepare_messages_to_precomputed_space(space: &Vec<Scalar>, cipher: &Gt) -> HashMap<[u8; 288], usize> {
    // Compute all candidate encodings in parallel, then key them by index.
    space
        .par_iter()
        .map(|m| (cipher * m.invert().unwrap()).to_compressed())
        .collect::<Vec<_>>()
        .into_iter()
        .enumerate()
        .map(|(i, key)| (key, i))
        .collect()
}
/// Looks each ciphertext element up in its precomputed table and reassembles
/// the plaintext bytes from the recovered k-bit chunk indices.
///
/// Panics if an element is missing from its table (i.e. decryption failed);
/// see `solve_dlog` for the fallible brute-force variant.
pub fn find_in_precomputed_space(ciphertext: &Vec<Gt>, _space: &Vec<Scalar>, maps: &Vec<HashMap<[u8; 288], usize>>, k: usize) -> Vec<u8> {
    // Recover the k plaintext bits encoded by every ciphertext element.
    let mut bits = Vec::new();
    for i in 0..ciphertext.len() {
        let index = maps[i][&ciphertext[i].to_compressed()];
        for z in 0..k {
            bits.push((index >> z) & 1);
        }
    }
    // Pack the bit stream back into bytes, LSB first (mirrors
    // `split_message_into_space`). `|=` instead of `+` for consistency with
    // `solve_dlog`; equivalent here since the bit positions are disjoint.
    let mut result = Vec::with_capacity(bits.len() / 8);
    for i in 0..bits.len() / 8 {
        let mut byte = 0u8;
        for j in 0..8 {
            byte |= (bits[i * 8 + j] << j) as u8;
        }
        result.push(byte);
    }
    result
}
/// Maps a byte message onto elements of `space`: the message is read as a
/// little-endian bit stream and consumed k bits at a time; each k-bit value
/// selects the scalar at that index in `space`.
pub fn split_message_into_space(msg: &Vec<u8>, space: &Vec<Scalar>, k: usize) -> Vec<Scalar> {
    // Explode the message into individual bits, LSB first within each byte.
    let mut bits = Vec::with_capacity(msg.len() * 8);
    for byte in msg {
        for j in 0..8 {
            bits.push((byte & (1 << j)) >> j);
        }
    }
    let msgsize = bits.len(); // message size in bits
    assert!(msgsize % k == 0, "Msg must be aligned to space"); //TODO extend msg with size and padding
    // Consume k bits at a time and index into the message space.
    let mut result = Vec::with_capacity(msgsize / k);
    for chunk in bits.chunks(k) {
        let mut idx = 0;
        for (j, bit) in chunk.iter().enumerate() {
            idx += (*bit as usize) << j;
        }
        result.push(space[idx]);
    }
    result
}
/// Brute-forces the k-bit discrete logs: for every target/ciphertext pair,
/// finds the space element m with `target * m == ciphertext` and reassembles
/// the plaintext bytes from the found indices.
///
/// Returns an error if any element has no match in the message space.
pub fn solve_dlog(target: &Vec<Gt>, ciphertext: &Vec<Gt>, space: &Vec<Scalar>, k: usize) -> Result<Vec<u8>, String> {
    let mut prep = Vec::new();
    let spacesize = space.len();
    // Precompute target_i * m for every m of the space, in parallel.
    let t = target.par_iter().map(|x| space.iter().map(|y| x * y).collect::<Vec<_>>()).collect::<Vec<_>>();
    for i in 0..target.len() {
        let mut found = false;
        for j in 0..spacesize {
            if t[i][j] == ciphertext[i] {
                found = true;
                // The chunk index j encodes k plaintext bits, LSB first.
                for z in 0..k {
                    prep.push((j >> z) & 1);
                }
                // Stop at the first match: the original kept scanning, which
                // wasted time and — should a second match ever occur — would
                // push extra bits and corrupt the reassembled message.
                break;
            }
        }
        if !found {
            return Err("Decryption failed.".to_string());
        }
    }
    // Pack the bit stream back into bytes, LSB first.
    let mut result = Vec::new();
    for i in 0..prep.len() / 8 {
        let mut x = 0u8;
        for j in 0..8 {
            x = x | (prep[i * 8 + j] << j) as u8;
        }
        result.push(x as u8);
    }
    Ok(result)
}
#[cfg(test)]
fn test_msg_space_k(k: usize){
let start = Instant::now();
let space= prepare_messages_to_space(k);
println!("k: {} {:.2?} preparing message space", k,start.elapsed());
let msg =[1,2,3,4,255,128,127,151,16,15,127,200,0,13,15,16];
let start = Instant::now();
let split = split_message_into_space(&msg.to_vec(),&space,k);
println!("k: {} {:.2?} spliting into message space", k,start.elapsed());
let gt = random_gt();
let gts = (&split).into_iter().map(|_x|gt).collect::<Vec<Gt>>();
let test = (&split).into_iter().map(|x|gt*x).collect::<Vec<Gt>>();
let start = Instant::now();
let solved = solve_dlog(&gts, &test, &space, k).unwrap();
println!("k: {} {:.2?} solve dlog ", k,start.elapsed());
assert_eq!(msg.to_vec(),solved);
let start = Instant::now();
let prepared_space= (&test).into_iter().map(|x|prepare_messages_to_precomputed_space(&space, x)).collect::<Vec<HashMap<[u8;288],usize>>>();
println!("k: {} {:.2?} precomputing message space", k,start.elapsed());
let start = Instant::now();
let prepared_solved = find_in_precomputed_space(&gts,&space,&prepared_space, k);
println!("k: {} {:.2?} lookup in prepared message space", k,start.elapsed());
assert_eq!(msg.to_vec(),prepared_solved);
}
/// Runs the message-space round-trip test for several chunk widths.
#[test]
fn test_msg_space() {
    //rayon::ThreadPoolBuilder::new().num_threads(1).build_global().unwrap();
    test_msg_space_k(2);
    test_msg_space_k(4);
    test_msg_space_k(8);
    //test_msg_space_k(16);
}