Delete test directory

This commit is contained in:
CMLiussss
2023-12-27 23:38:49 +08:00
committed by GitHub
parent 7312382a26
commit c4954dc43f
10 changed files with 0 additions and 8023 deletions

File diff suppressed because it is too large. (Load Diff)

View File

@@ -1,73 +0,0 @@
// Demo: error propagation through a ReadableStream -> WritableStream pipe.
// The writable side errors on the second chunk; pipeTo() is expected to
// reject and cancel the readable side, flipping `isCancel`.
let isCancel = false;

const readableStream = new ReadableStream({
  start(controller) {
    // Queue two chunks up front: 'Chunk 0' and 'Chunk 1'.
    for (let i = 0; i < 2; i++) {
      controller.enqueue(`Chunk ${i}`);
    }
  },
  async pull(controller) {
    console.log('ReadableStream Pulling data...');
  },
  cancel() {
    isCancel = true;
    console.log('ReadableStream was canceled.');
  },
});

const writableStream = new WritableStream({
  write(chunk, controller) {
    console.log(`Received data: ${chunk}`);
    // Fail the sink on the second chunk to observe backward error propagation.
    if (chunk === 'Chunk 1') {
      controller.error('eroorooororo');
      return;
    }
    // Unreached in this demo (the sink errors before 'Chunk 3').
    if (chunk === 'Chunk 3') {
      throw new Error('uuid is not valid');
    }
  },
  close() {
    console.log('WritableStream was closed');
  },
  abort() {
    console.log('WritableStream was aborted');
  }
});

readableStream.pipeTo(writableStream).catch((err) => {
  console.log('-----------------------error-------------------');
  console.log(err);
});

View File

@@ -1,11 +0,0 @@
// Trace-proxy worker: fetches `http://<address>/cdn-cgi/trace` for the
// address given in the query string and streams the upstream body back.
const worker = {
  /**
   * @param {Request} request - incoming request; expects an `address` query param.
   * @param {unknown} env - unused.
   * @param {unknown} ctx - unused.
   * @returns {Promise<Response>} 400 when `address` is missing, otherwise the
   *   proxied trace body with status 200.
   */
  async fetch(request, env, ctx) {
    const url = new URL(request.url);
    const address = url.searchParams.get("address");
    if (!address) {
      // Fix: a missing required parameter is a client error — answer 400
      // (the original returned 200 for this failure case).
      return new Response('not pass address', { status: 400 });
    }
    // Fix: await the fetch explicitly instead of `(await resp).body`.
    const resp = await fetch(`http://${address}/cdn-cgi/trace`);
    return new Response(resp.body, { status: 200 });
  }
};
export default worker;

View File

@@ -1,22 +0,0 @@
// Header-echo worker: serializes the incoming request's headers (and its
// `cf` metadata, when present) to JSON and returns them to the caller.
const worker = {
  /**
   * @param {import("@cloudflare/workers-types").Request} request
   * @param {{uuid: string}} env
   * @param {import("@cloudflare/workers-types").ExecutionContext} ctx
   * @returns {Promise<Response>} JSON body `{"http-header": {...}, "cf": ...}`.
   */
  async fetch(request, env, ctx) {
    // Idiom fix: build the plain-object copy of the headers with
    // Object.fromEntries instead of a manual accumulation loop.
    const headers = Object.fromEntries(request.headers.entries());
    const result = {
      "http-header": headers,
      "cf": request.cf
    };
    const headersJson = JSON.stringify(result);
    console.log(headersJson);
    return new Response(headersJson, { status: 200 });
  }
};
export default worker;

View File

@@ -1,93 +0,0 @@
import IPCIDR from 'ip-cidr';
const chunk = '0'.repeat(1024 * 5);
export default {
  // Demo entry point: performs a throwaway CIDR membership check and
  // replies with a bare 101 status (no body).
  async fetch(request, env, ctx) {
    const inRange = checkIPInCIDR("192.168.1.1", "102.1.5.2/24");
    return new Response(null, {
      status: 101
    });
  },
};
/**
 * Tests whether `ip` falls inside the CIDR range, delegating the
 * membership check to the `ip-cidr` library.
 *
 * @param {string} ip The address to test.
 * @param {string} cidr The CIDR range, e.g. "10.0.0.0/8".
 * @returns {boolean} `true` when the address is inside the range.
 */
function checkIPInCIDR(ip, cidr) {
  return new IPCIDR(cidr).contains(ip);
}
/** Promise-based sleep: resolves after `ms` milliseconds. */
function delay(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
 * Checks if an IPv4 address is within a CIDR range.
 *
 * @param {string} address The IPv4 address to check.
 * @param {string} cidr The CIDR range to check against.
 * @returns {boolean} `true` if the address is within the CIDR range, `false` otherwise.
 */
function isIPv4InRange(address, cidr) {
  const [rangeAddress, rangePrefix] = cidr.split('/');
  const prefix = parseInt(rangePrefix, 10);
  // Render a dotted-quad address as a 32-character bit string.
  const toBits = (ip) =>
    ip
      .split('.')
      .map((octet) => parseInt(octet, 10).toString(2).padStart(8, '0'))
      .join('');
  // Membership holds when the first `prefix` bits agree.
  return toBits(address).slice(0, prefix) === toBits(rangeAddress).slice(0, prefix);
}
/**
 * Checks if an IPv6 address is within a CIDR range.
 *
 * Supports both fully expanded addresses and the compressed `::` form.
 * (The original implementation produced garbage bits for `::` because
 * `parseInt('', 16)` is NaN, which padded the literal string "NaN" into
 * the comparison.)
 *
 * @param {string} address The IPv6 address to check.
 * @param {string} cidr The CIDR range to check against.
 * @returns {boolean} `true` if the address is within the CIDR range, `false` otherwise.
 */
function isIPv6InRange(address, cidr) {
  const [rangeAddress, rangePrefix] = cidr.split('/');
  const prefix = parseInt(rangePrefix, 10);
  // Membership holds when the first `prefix` bits agree.
  return ipv6ToBits(address).slice(0, prefix) === ipv6ToBits(rangeAddress).slice(0, prefix);
}
/**
 * Expands an IPv6 address (including the `::` shorthand) into a
 * 128-character bit string.
 *
 * @param {string} address The IPv6 address.
 * @returns {string} Concatenated 16-bit binary groups.
 */
function ipv6ToBits(address) {
  let groups;
  if (address.includes('::')) {
    // Split around the compression marker and fill the gap with zero groups.
    const [head, tail] = address.split('::');
    const headGroups = head ? head.split(':') : [];
    const tailGroups = tail ? tail.split(':') : [];
    const zeros = new Array(8 - headGroups.length - tailGroups.length).fill('0');
    groups = [...headGroups, ...zeros, ...tailGroups];
  } else {
    groups = address.split(':');
  }
  return groups
    .map((group) => parseInt(group, 16).toString(2).padStart(16, '0'))
    .join('');
}

File diff suppressed because it is too large. (Load Diff)

View File

@@ -1,55 +0,0 @@
export default {
  // WebSocket demo: streams an incrementing counter to the client every
  // 500 ms, then errors the pipeline once the counter reaches 3.
  async fetch(request, env, ctx) {
    const upgradeHeader = request.headers.get('Upgrade');
    if (!upgradeHeader || upgradeHeader !== 'websocket') {
      return new Response('not websocket', { status: 200 });
    }

    const pair = new WebSocketPair();
    const [client, server] = Object.values(pair);
    server.accept();

    let counter = 0;
    const source = new ReadableStream({
      start(controller) {
        // Emit the next counter value twice a second.
        setInterval(() => {
          controller.enqueue(counter);
          counter++;
        }, 500);
      },
      async pull(controller) {
      },
      cancel() {
        console.log('ReadableStream was canceled.');
      },
    });

    const sink = new WritableStream({
      write(chunk, controller) {
        console.log(`Received data: ${chunk}`);
        server.send(`Received data: ${chunk}`);
        // Error the sink on the fourth value to exercise pipeTo rejection.
        if (chunk === 3) {
          controller.error('eroorooororo');
          return;
        }
      },
      close() {
        console.log('WritableStream was closed');
      },
      abort() {
        console.log('WritableStream was aborted');
      }
    });

    source.pipeTo(sink).catch((error) => {
      console.log('pipeTo error', error);
      server.close();
    });

    server.addEventListener('close', () => {
      console.log('close');
    });

    return new Response(null, {
      status: 101,
      webSocket: client,
    });
  }
};

View File

@@ -1,78 +0,0 @@
import { connect } from 'cloudflare:sockets';
export default {
  // Socket demo: opens a TCP connection to `?target=` on port 443, pushes a
  // few raw bytes at it, and always answers with a placeholder 500 response.
  async fetch(request, env, ctx) {
    console.log('start fetch111');
    const url = new URL(request.url);
    const target = url.searchParams.get('target');
    try {
      try {
        /** @type {import("@cloudflare/workers-types").Socket}*/
        const socket = connect({
          hostname: target,
          port: 443,
        });
        // Fire ten raw bytes down the socket; failures land in the catch below.
        const probe = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
        await socket.writable.getWriter().write(probe);
      } catch (e) {
        console.log('connect error', e);
      }
      console.log('start conneted', target);
      return new Response('yyyyyyyyyyyyyyyyyyyyyyyyyy', {
        headers: { 'Content-Type': 'text/plain' },
        status: 500,
      });
    } catch (error) {
      console.log('Socket connection failed: ' + error);
      return new Response('Socket connection failed: ' + error, {
        status: 500,
      });
    }
  },
};
/** Promise-based sleep: resolves after `timeout` milliseconds. */
function delay(timeout) {
  return new Promise((resolve) => setTimeout(resolve, timeout));
}

View File

@@ -1,6 +0,0 @@
# Wrangler configuration for the cf-worker-http-header test worker.
name = "cf-worker-http-header" # todo
main = "./cf-worker-http-header.js"
compatibility_date = "2023-05-26"
# Plain-text environment variables exposed on `env` inside the worker.
[vars]
UUID = "example_dev_token"

View File

@@ -1,60 +0,0 @@
const chunk = '0'.repeat(1024 * 5);
export default {
async fetch(request, env, ctx) {
try {
console.log('---------------');
const webSocketPair = new WebSocketPair();
/** @type {import("@cloudflare/workers-types").WebSocket[]} */
const [client, webSocket] = Object.values(webSocketPair);
webSocket.accept();
let btyes = 0;
// (async () => {
// const repose = await fetch('http://speed.cloudflare.com/__down?bytes=1145141919810')
// const body = repose.body;
// const reader = body?.getReader();
// let packets = [];
// while (true && reader) {
// const { done, value } = await reader.read();
// packets.push(value);
// console.log(btyes += value?.length || 0);
// if (packets.length > 100) {
// webSocket.send(value || '');
// await delay(2);
// packets = [];
// }
// if (done) {
// break;
// }
// }
// })()
console.log('---------------');
(async () => {
let packets = [];
console.log('---------------');
while (true) {
console.log(btyes += chunk?.length || 0);
webSocket.send(chunk || '');
await delay(1)
}
})()
// console.log(btyes += chunk?.length || 0);
// webSocket.send(chunk || '');
return new Response(null, {
status: 101,
webSocket: client,
});
} catch (err) {
/** @type {Error} */ let e = err;
return new Response(e.toString());
}
},
};
/** Promise-based sleep: resolves after `ms` milliseconds. */
function delay(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}