Compare commits

...
Sign in to create a new pull request.

2 commits

Author SHA1 Message Date
9cec2e7567 add migrate_gt_to_fj.exs 2025-05-03 14:00:16 -04:00
2337190a1b wip: until-destroyed-migrator 2024-11-15 17:32:51 -05:00
5 changed files with 328 additions and 0 deletions

52
migrate_gt_to_fj.exs Normal file
View file

@@ -0,0 +1,52 @@
# Script dependency: Req, the HTTP client used for both the Gitea and
# Forgejo API calls below.
Mix.install([
{:req, "~> 0.5.10"}
]);
# All configuration comes from the environment. System.get_env/1 returns
# nil for unset variables, so each of these must be exported before running.
forgejo_token = System.get_env("FORGEJO_TOKEN")
gitea_token = System.get_env("GITEA_TOKEN")
gt_host = System.get_env("GITEA_HOST")
fj_host = System.get_env("FORGEJO_HOST")
gt_uid = System.get_env("GITEA_UID")
# Migrates every repository owned by a Gitea user into a Forgejo instance
# via Forgejo's repository-migration API.
defmodule Forgejo do
  require Req

  # Fetches all repos owned by `uid` from the Gitea host and migrates each
  # one to the Forgejo host. Returns the list of migration responses on
  # success; logs and returns :ok on a non-200 search response.
  def migrate_all(uid, gt_host, gt_token, fj_host, fj_token) do
    url = "#{gt_host}/api/v1/repos/search"
    params = %{"uid" => uid}

    # Req.get! raises on transport errors, so only HTTP statuses are matched
    # here. The original code had an `{:error, error}` clause that could never
    # match a %Req.Response{}, so any non-200 status crashed with a MatchError;
    # the catch-all status clause below replaces it.
    case Req.get!(url, params: params, headers: %{"Authorization" => "bearer #{gt_token}"}) do
      %{status: 200, body: body} ->
        for repo <- body["data"] do
          # BUG FIX: the original called migrate(repo, host, gt_token, fj_host,
          # fj_token) — `host` was undefined and migrate/5 does not exist
          # (only migrate/4 is defined below).
          migrate(repo, gt_token, fj_host, fj_token)
        end

      %{status: status, body: body} ->
        IO.puts("Error fetching repos: status #{status} - #{inspect(body)}")
    end
  end

  # Migrates a single Gitea repo into Forgejo, preserving its privacy flag
  # and pulling PRs, LFS objects, and releases. The target owner is the
  # hard-coded "caleb" account.
  def migrate(repo, gt_token, fj_host, fj_token) do
    migration =
      %{
        "private" => repo["private"],
        "clone_addr" => repo["clone_url"],
        "auth_token" => gt_token,
        "pull_requests" => true,
        "lfs" => true,
        "releases" => true,
        "repo_name" => repo["name"],
        "repo_owner" => "caleb",
        "service" => "gitea"
      }
      |> :json.encode()

    Req.post!("#{fj_host}/api/v1/repos/migrate",
      body: migration,
      headers: %{"Authorization" => "bearer #{fj_token}", "Content-Type" => "application/json"}
    )
  end
end
# Entry point: migrate every repo owned by the configured Gitea user.
Forgejo.migrate_all(gt_uid, gt_host, gitea_token, fj_host, forgejo_token)

175
until-destroyed-migrator/.gitignore vendored Normal file
View file

@@ -0,0 +1,175 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

View file

@@ -0,0 +1,15 @@
# until-destroyed-migrator
To install dependencies:
```bash
bun install
```
To run:
```bash
bun run index.ts
```
This project was created using `bun init` in bun v1.0.23. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

Binary file not shown.

View file

@@ -0,0 +1,86 @@
import { Glob } from "bun";
import babelParser from "@babel/parser";
// Root of the Angular repo to scan (hard-coded to a local checkout).
let dir = "/Users/caleb.webber/git/snag/Web.MarketplaceAdmin/";
// NOTE(review): "!{node_modules}" looks intended to exclude node_modules, but
// a leading "!" is not standard glob negation inside a pattern — verify this
// actually matches the intended files under Bun's Glob.
let glob = new Glob("!{node_modules}/**/*.ts");
// All candidate .ts paths, minus anything under an e2e directory/name.
const files = [...glob.scanSync(dir)].filter((f) => !f.includes("e2e"));
/**
 * Parses TypeScript source text into a Babel AST.
 * Legacy decorator syntax is enabled so Angular components parse cleanly,
 * and imports/exports are allowed anywhere in the file.
 */
export function parse(text: string) {
  const parserOptions = {
    allowImportExportEverywhere: true,
    plugins: ["decorators-legacy", "typescript"],
  };
  return babelParser.parse(text, parserOptions);
}
/** True when the file (path relative to `dir`) mentions "ngneat" anywhere. */
export async function fileContainsNgNeat(file: string) {
  const contents = await Bun.file(`${dir}${file}`).text();
  return contents.indexOf("ngneat") !== -1;
}
/**
 * Returns the ImportDeclaration node in `tree` whose source is exactly
 * `module`, or undefined when no such import exists.
 */
export function getImportFrom(tree: any, module: string) {
  for (const node of tree.program.body) {
    if (node.type === "ImportDeclaration" && node.source.value === module) {
      return node;
    }
  }
  return undefined;
}
/**
 * Returns a deep copy of `tree` whose "@angular/core" import includes the
 * `inject` and `takeUntilDestroyed` specifiers, creating the import
 * declaration when the file has none. The input tree is not mutated.
 *
 * NOTE(review): in current Angular, `takeUntilDestroyed` is exported from
 * "@angular/core/rxjs-interop", not "@angular/core" — confirm the intended
 * source module before applying this migration.
 */
export function ensureAngularCoreImports(tree: any) {
  const t2 = structuredClone(tree);
  let angularCoreImport = getImportFrom(t2, "@angular/core");
  if (!angularCoreImport) {
    angularCoreImport = {
      type: "ImportDeclaration",
      specifiers: [],
      source: { type: "StringLiteral", value: "@angular/core" },
    };
    // BUG FIX: the original built this fallback node with `??` but never
    // attached it to the program body, so files without an existing
    // "@angular/core" import were returned unchanged.
    t2.program.body.unshift(angularCoreImport);
  }
  // Skip names that are already imported so repeated calls don't pile up
  // duplicate specifiers (the original pushed unconditionally).
  const existing = new Set(
    angularCoreImport.specifiers.map((s: any) => s.local?.name),
  );
  for (const name of ["inject", "takeUntilDestroyed"]) {
    if (existing.has(name)) continue;
    angularCoreImport.specifiers.push({
      type: "ImportSpecifier",
      imported: { type: "Identifier", name },
      local: { type: "Identifier", name },
    });
  }
  return t2;
}
/**
 * Returns a deep copy of `tree` with every import from an "ngneat" package
 * removed. All other nodes are kept in order; the input is not mutated.
 */
export function removeNgNeatImports(tree: any) {
  const t2 = structuredClone(tree);
  const kept: any[] = [];
  for (const node of t2.program.body) {
    const isNgNeatImport =
      node.type === "ImportDeclaration" && node.source.value.includes("ngneat");
    if (!isNgNeatImport) {
      kept.push(node);
    }
  }
  t2.program.body = kept;
  return t2;
}
// Pair every file with a flag saying whether it references ngneat, then keep
// only the paths whose flag is true.
const filesToFix = (
  await Promise.all(files.map(async (f) => [await fileContainsNgNeat(f), f]))
)
  .filter(([shouldFix]) => shouldFix)
  .map(([, f]) => f);
// Parsed ASTs for every file that needs fixing.
// NOTE(review): `trees` is exported but never used below — presumably
// consumed by another module; verify before removing.
export const trees = await Promise.all(
  filesToFix.map(async (f) => {
    const text = await Bun.file(`${dir}${f}`).text();
    return parse(text);
  }),
);
// Sanity pass: report, file by file, whether Babel can parse each migration
// candidate, so unparseable files surface before any rewriting happens.
for (const f of filesToFix) {
  const text = await Bun.file(`${dir}${f}`).text();
  try {
    const parsed = parse(text);
    console.log(`could parse ${f} - ${parsed.type === "File"}`);
  } catch (e) {
    console.error(`could not parse ${f}`);
    console.error(e);
  }
}
console.log(`Found ${filesToFix.length} files to fix`);