Compare commits

...

4 Commits

Author SHA1 Message Date
dereklseitz
e87a8153c9 maintenance: Clean up codebase and improve formatting
• Remove placeholder comments and unused code
• Standardize inline styles and formatting
• Update component structure and organization
2025-09-28 10:47:30 -05:00
dereklseitz
918121cd66 feat: Modularized Editor into Wysiwyg and Markdown/Viewer components
- Add MetadataEditor for meta inputs
    - Add toggle between editor modes
    - Add Breadcrumb for clearer navigation
    - Add custom tool for formatting inline code in WysiwygEditor
2025-09-28 10:44:44 -05:00
dereklseitz
77c385dc9b feat: Add custom icons to external links in PanelBar 2025-09-28 10:39:11 -05:00
dereklseitz
95cb2e6ff1 Refactor: Clean up unused files and placeholder components 2025-09-28 10:38:21 -05:00
53 changed files with 1349 additions and 582 deletions

View File

@@ -4,10 +4,27 @@
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Vite + React</title>
<title>Campfire Logs Dashboard</title>
<!-- Preload critical resources -->
<link rel="preload" href="/src/assets/images/campfire_logs_square_logo_bg_match.png" as="image" fetchpriority="high" />
<link rel="preload" href="/src/assets/css/index.css" as="style" />
<link rel="preload" href="/src/main.jsx" as="script" />
<!-- Preconnect to external domains -->
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
</head>
<body>
<div id="root"></div>
<!-- Preload LCP image in HTML for early discovery -->
<img
src="/src/assets/images/campfire_logs_square_logo_bg_match.png"
alt="Campfire Logs Logo"
style="display: none;"
loading="eager"
fetchpriority="high"
/>
<script type="module" src="/src/main.jsx"></script>
</body>
</html>

230
package-lock.json generated
View File

@@ -16,6 +16,7 @@
"@progress/kendo-react-dateinputs": "^12.0.1",
"@progress/kendo-react-dialogs": "^12.0.1",
"@progress/kendo-react-dropdowns": "^12.0.1",
"@progress/kendo-react-editor": "^12.0.1",
"@progress/kendo-react-grid": "^12.0.1",
"@progress/kendo-react-indicators": "^12.0.1",
"@progress/kendo-react-inputs": "^12.0.1",
@@ -1081,7 +1082,6 @@
"resolved": "https://registry.npmjs.org/@progress/kendo-common/-/kendo-common-1.0.2.tgz",
"integrity": "sha512-PHxnquetSmtmXiF4dmlQiypzXaFLUEPK3VAOHxmnRDrLxaPrcZfaW9FOOiyur8hv4QmXlohISMwMElZS8Xi1Ag==",
"license": "SEE LICENSE IN LICENSE.md",
"peer": true,
"dependencies": {
"tslib": "^1.7.0"
}
@@ -1090,8 +1090,7 @@
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
"license": "0BSD",
"peer": true
"license": "0BSD"
},
"node_modules/@progress/kendo-data-query": {
"version": "1.7.1",
@@ -1158,6 +1157,35 @@
"@progress/pako-esm": "^1.0.1"
}
},
"node_modules/@progress/kendo-editor-common": {
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/@progress/kendo-editor-common/-/kendo-editor-common-1.12.3.tgz",
"integrity": "sha512-vuB0ZE60uBqBTPOEP4YHkr1yJIVB4/HEq5YzrpH1CRuhn1Oexq1r0hItw+VgAEw3cfwdXQnCi20ZtnTvH7GepQ==",
"license": "SEE LICENSE IN LICENSE.md",
"dependencies": {
"@progress/kendo-common": "^1.0.2",
"prosemirror-commands": "1.7.1",
"prosemirror-dropcursor": "1.8.2",
"prosemirror-gapcursor": "1.3.2",
"prosemirror-history": "1.4.1",
"prosemirror-inputrules": "1.5.0",
"prosemirror-keymap": "1.2.3",
"prosemirror-model": "1.25.1",
"prosemirror-schema-list": "1.5.1",
"prosemirror-state": "1.4.3",
"prosemirror-tables": "1.7.1",
"prosemirror-transform": "1.10.4",
"prosemirror-view": "1.39.3",
"tslib": "^2.8.1"
}
},
"node_modules/@progress/kendo-file-saver": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@progress/kendo-file-saver/-/kendo-file-saver-1.1.2.tgz",
"integrity": "sha512-hWpJ67L8b2+GIhsIWR09NgGaEh87jvcHv7kScC671cbVWJycXTGqdy3ZoI0pzIaH8K0IgP2TNkF1ay4HGxe+pg==",
"license": "Apache-2.0",
"peer": true
},
"node_modules/@progress/kendo-inputs-common": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@progress/kendo-inputs-common/-/kendo-inputs-common-3.1.2.tgz",
@@ -1369,6 +1397,33 @@
"react-dom": "^16.8.2 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
}
},
"node_modules/@progress/kendo-react-editor": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/@progress/kendo-react-editor/-/kendo-react-editor-12.1.0.tgz",
"integrity": "sha512-2T3PUF1nsWmkjy7TaFCaTtnDF/8Uf0p+BJKMydc5376m2YPyHqHqjRCQwD9o+rehuHxwK41Fpq1iK9bZcU61cg==",
"license": "SEE LICENSE IN LICENSE.md",
"dependencies": {
"@progress/kendo-editor-common": "1.12.3",
"prop-types": "^15.6.0"
},
"peerDependencies": {
"@progress/kendo-drawing": "^1.21.2",
"@progress/kendo-licensing": "^1.7.0",
"@progress/kendo-react-buttons": "12.1.0",
"@progress/kendo-react-common": "12.1.0",
"@progress/kendo-react-dialogs": "12.1.0",
"@progress/kendo-react-dropdowns": "12.1.0",
"@progress/kendo-react-form": "12.1.0",
"@progress/kendo-react-inputs": "12.1.0",
"@progress/kendo-react-intl": "12.1.0",
"@progress/kendo-react-layout": "12.1.0",
"@progress/kendo-react-pdf": "12.1.0",
"@progress/kendo-react-popup": "12.1.0",
"@progress/kendo-svg-icons": "^4.0.0",
"react": "^16.8.2 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc",
"react-dom": "^16.8.2 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
}
},
"node_modules/@progress/kendo-react-form": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/@progress/kendo-react-form/-/kendo-react-form-12.1.0.tgz",
@@ -1546,6 +1601,24 @@
"react-dom": "^16.8.2 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
}
},
"node_modules/@progress/kendo-react-pdf": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/@progress/kendo-react-pdf/-/kendo-react-pdf-12.1.0.tgz",
"integrity": "sha512-WR8cqBgqOvl/9ghmwTeHaUtt177BQUPv+SXUWuVvVtMMglZgaSRnz+NR74pCgbUUsKSmOJ13UhBS2hGMFE/M9Q==",
"license": "SEE LICENSE IN LICENSE.md",
"peer": true,
"dependencies": {
"prop-types": "^15.6.0"
},
"peerDependencies": {
"@progress/kendo-drawing": "^1.21.2",
"@progress/kendo-file-saver": "^1.0.1",
"@progress/kendo-licensing": "^1.7.0",
"@progress/kendo-react-common": "12.1.0",
"react": "^16.8.2 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc",
"react-dom": "^16.8.2 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc"
}
},
"node_modules/@progress/kendo-react-popup": {
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/@progress/kendo-react-popup/-/kendo-react-popup-12.1.0.tgz",
@@ -3983,6 +4056,12 @@
"node": ">= 0.8.0"
}
},
"node_modules/orderedmap": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/orderedmap/-/orderedmap-2.1.1.tgz",
"integrity": "sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g==",
"license": "MIT"
},
"node_modules/p-limit": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
@@ -4145,6 +4224,136 @@
"react-is": "^16.13.1"
}
},
"node_modules/prosemirror-commands": {
"version": "1.7.1",
"resolved": "https://registry.npmjs.org/prosemirror-commands/-/prosemirror-commands-1.7.1.tgz",
"integrity": "sha512-rT7qZnQtx5c0/y/KlYaGvtG411S97UaL6gdp6RIZ23DLHanMYLyfGBV5DtSnZdthQql7W+lEVbpSfwtO8T+L2w==",
"license": "MIT",
"dependencies": {
"prosemirror-model": "^1.0.0",
"prosemirror-state": "^1.0.0",
"prosemirror-transform": "^1.10.2"
}
},
"node_modules/prosemirror-dropcursor": {
"version": "1.8.2",
"resolved": "https://registry.npmjs.org/prosemirror-dropcursor/-/prosemirror-dropcursor-1.8.2.tgz",
"integrity": "sha512-CCk6Gyx9+Tt2sbYk5NK0nB1ukHi2ryaRgadV/LvyNuO3ena1payM2z6Cg0vO1ebK8cxbzo41ku2DE5Axj1Zuiw==",
"license": "MIT",
"dependencies": {
"prosemirror-state": "^1.0.0",
"prosemirror-transform": "^1.1.0",
"prosemirror-view": "^1.1.0"
}
},
"node_modules/prosemirror-gapcursor": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/prosemirror-gapcursor/-/prosemirror-gapcursor-1.3.2.tgz",
"integrity": "sha512-wtjswVBd2vaQRrnYZaBCbyDqr232Ed4p2QPtRIUK5FuqHYKGWkEwl08oQM4Tw7DOR0FsasARV5uJFvMZWxdNxQ==",
"license": "MIT",
"dependencies": {
"prosemirror-keymap": "^1.0.0",
"prosemirror-model": "^1.0.0",
"prosemirror-state": "^1.0.0",
"prosemirror-view": "^1.0.0"
}
},
"node_modules/prosemirror-history": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/prosemirror-history/-/prosemirror-history-1.4.1.tgz",
"integrity": "sha512-2JZD8z2JviJrboD9cPuX/Sv/1ChFng+xh2tChQ2X4bB2HeK+rra/bmJ3xGntCcjhOqIzSDG6Id7e8RJ9QPXLEQ==",
"license": "MIT",
"dependencies": {
"prosemirror-state": "^1.2.2",
"prosemirror-transform": "^1.0.0",
"prosemirror-view": "^1.31.0",
"rope-sequence": "^1.3.0"
}
},
"node_modules/prosemirror-inputrules": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/prosemirror-inputrules/-/prosemirror-inputrules-1.5.0.tgz",
"integrity": "sha512-K0xJRCmt+uSw7xesnHmcn72yBGTbY45vm8gXI4LZXbx2Z0jwh5aF9xrGQgrVPu0WbyFVFF3E/o9VhJYz6SQWnA==",
"license": "MIT",
"dependencies": {
"prosemirror-state": "^1.0.0",
"prosemirror-transform": "^1.0.0"
}
},
"node_modules/prosemirror-keymap": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/prosemirror-keymap/-/prosemirror-keymap-1.2.3.tgz",
"integrity": "sha512-4HucRlpiLd1IPQQXNqeo81BGtkY8Ai5smHhKW9jjPKRc2wQIxksg7Hl1tTI2IfT2B/LgX6bfYvXxEpJl7aKYKw==",
"license": "MIT",
"dependencies": {
"prosemirror-state": "^1.0.0",
"w3c-keyname": "^2.2.0"
}
},
"node_modules/prosemirror-model": {
"version": "1.25.1",
"resolved": "https://registry.npmjs.org/prosemirror-model/-/prosemirror-model-1.25.1.tgz",
"integrity": "sha512-AUvbm7qqmpZa5d9fPKMvH1Q5bqYQvAZWOGRvxsB6iFLyycvC9MwNemNVjHVrWgjaoxAfY8XVg7DbvQ/qxvI9Eg==",
"license": "MIT",
"dependencies": {
"orderedmap": "^2.0.0"
}
},
"node_modules/prosemirror-schema-list": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/prosemirror-schema-list/-/prosemirror-schema-list-1.5.1.tgz",
"integrity": "sha512-927lFx/uwyQaGwJxLWCZRkjXG0p48KpMj6ueoYiu4JX05GGuGcgzAy62dfiV8eFZftgyBUvLx76RsMe20fJl+Q==",
"license": "MIT",
"dependencies": {
"prosemirror-model": "^1.0.0",
"prosemirror-state": "^1.0.0",
"prosemirror-transform": "^1.7.3"
}
},
"node_modules/prosemirror-state": {
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/prosemirror-state/-/prosemirror-state-1.4.3.tgz",
"integrity": "sha512-goFKORVbvPuAQaXhpbemJFRKJ2aixr+AZMGiquiqKxaucC6hlpHNZHWgz5R7dS4roHiwq9vDctE//CZ++o0W1Q==",
"license": "MIT",
"dependencies": {
"prosemirror-model": "^1.0.0",
"prosemirror-transform": "^1.0.0",
"prosemirror-view": "^1.27.0"
}
},
"node_modules/prosemirror-tables": {
"version": "1.7.1",
"resolved": "https://registry.npmjs.org/prosemirror-tables/-/prosemirror-tables-1.7.1.tgz",
"integrity": "sha512-eRQ97Bf+i9Eby99QbyAiyov43iOKgWa7QCGly+lrDt7efZ1v8NWolhXiB43hSDGIXT1UXgbs4KJN3a06FGpr1Q==",
"license": "MIT",
"dependencies": {
"prosemirror-keymap": "^1.2.2",
"prosemirror-model": "^1.25.0",
"prosemirror-state": "^1.4.3",
"prosemirror-transform": "^1.10.3",
"prosemirror-view": "^1.39.1"
}
},
"node_modules/prosemirror-transform": {
"version": "1.10.4",
"resolved": "https://registry.npmjs.org/prosemirror-transform/-/prosemirror-transform-1.10.4.tgz",
"integrity": "sha512-pwDy22nAnGqNR1feOQKHxoFkkUtepoFAd3r2hbEDsnf4wp57kKA36hXsB3njA9FtONBEwSDnDeCiJe+ItD+ykw==",
"license": "MIT",
"dependencies": {
"prosemirror-model": "^1.21.0"
}
},
"node_modules/prosemirror-view": {
"version": "1.39.3",
"resolved": "https://registry.npmjs.org/prosemirror-view/-/prosemirror-view-1.39.3.tgz",
"integrity": "sha512-bY/7kg0LzRE7ytR0zRdSMWX3sknEjw68l836ffLPMh0OG3OYnNuBDUSF3v0vjvnzgYjgY9ZH/RypbARURlcMFA==",
"license": "MIT",
"dependencies": {
"prosemirror-model": "^1.20.0",
"prosemirror-state": "^1.0.0",
"prosemirror-transform": "^1.1.0"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
@@ -4306,6 +4515,12 @@
"fsevents": "~2.3.2"
}
},
"node_modules/rope-sequence": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/rope-sequence/-/rope-sequence-1.3.4.tgz",
"integrity": "sha512-UT5EDe2cu2E/6O4igUr5PSFs23nvvukicWHx6GnOPlHAiiYbzNuCRQCuiUdHJQcqKalLKlrYJnjY0ySGsXNQXQ==",
"license": "MIT"
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -4562,8 +4777,7 @@
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"license": "0BSD",
"peer": true
"license": "0BSD"
},
"node_modules/type-check": {
"version": "0.4.0",
@@ -4694,6 +4908,12 @@
}
}
},
"node_modules/w3c-keyname": {
"version": "2.2.8",
"resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
"integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
"license": "MIT"
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",

View File

@@ -18,6 +18,7 @@
"@progress/kendo-react-dateinputs": "^12.0.1",
"@progress/kendo-react-dialogs": "^12.0.1",
"@progress/kendo-react-dropdowns": "^12.0.1",
"@progress/kendo-react-editor": "^12.0.1",
"@progress/kendo-react-grid": "^12.0.1",
"@progress/kendo-react-indicators": "^12.0.1",
"@progress/kendo-react-inputs": "^12.0.1",

View File

@@ -38,25 +38,18 @@ function App() {
)}
{isLoggedIn && location.pathname !== '/login' ? (
<div style={{ display: 'flex', minHeight: '100vh' }}>
<div className="app-layout">
{/* Always render the PanelBar on the left */}
<CampfirePanelBar
isExpanded={isPanelExpanded}
onPanelToggle={handlePanelToggle}
isLoggedIn={isLoggedIn}
>
{/* AppRoutes are the main content of the Panel */}
<AppRoutes isLoggedIn={isLoggedIn} onLogin={handleLogin} />
</CampfirePanelBar>
/>
{/* Render Dashboard on the right only when on /dashboard */}
{location.pathname === '/dashboard' && (
<div style={{ flex: 1, marginLeft: '100px' }}>
<Dashboard isLoggedIn={isLoggedIn}>
{/* Fixed width content area to prevent shifts */}
<div className="content-area">
<AppRoutes isLoggedIn={isLoggedIn} onLogin={handleLogin} />
</Dashboard>
</div>
)}
</div>
) : (
// If not logged in or on the login page, just render the routes

View File

@@ -13,6 +13,18 @@
-moz-osx-font-smoothing: grayscale;
}
/* Critical above-the-fold styles */
body {
margin: 0;
display: flex;
flex-direction: column;
justify-content: center;
place-items: center;
min-width: 320px;
min-height: 100vh;
background-color: #242424;
}
a {
font-weight: 500;
color: #646cff;
@@ -91,3 +103,131 @@ button:focus-visible {
opacity: 0;
transition: opacity 300ms;
}
/* Kendo Editor text color override */
.k-editor-content,
.k-editor-content *,
.k-editor-content p,
.k-editor-content div,
.k-editor-content span,
.k-editor-content strong,
.k-editor-content em,
.k-editor-content h1,
.k-editor-content h2,
.k-editor-content h3,
.k-editor-content h4,
.k-editor-content h5,
.k-editor-content h6,
.k-editor-content li,
.k-editor-content td,
.k-editor-content th,
.k-editor .k-editor-content,
.k-editor .k-editor-content *,
.k-editor .k-editor-content p,
.k-editor .k-editor-content div,
.k-editor .k-editor-content span,
.k-editor .k-editor-content strong,
.k-editor .k-editor-content em,
.k-editor * {
color: #f5f1e9 !important;
}
/* Prevent layout shifts */
.dashboard-content {
transition: opacity 0.2s ease-in-out;
margin-top: 0 !important;
}
.dashboard-content.content-wrapper {
margin-top: 20px !important;
padding-top: 20px !important;
min-height: 300px;
contain: layout;
}
.dashboard-content.content-wrapper h2 {
margin-top: 0 !important;
}
.dashboard-content.loading {
opacity: 0.7;
}
/* Force stable layout from initial load */
.app-layout {
width: 100%;
min-height: 100vh;
display: flex;
position: relative;
will-change: auto;
contain: layout;
transform: translateZ(0); /* Force hardware acceleration */
margin-top: 10px;
}
.content-area {
width: 1000px;
margin-left: 100px;
position: relative;
padding: 0 20px;
min-height: 400px;
contain: layout;
transform: translateY(-75px);
}
/* Ensure content doesn't shift during loading */
.content-wrapper {
width: 1000px;
margin: 0 auto;
min-height: 200px;
position: relative;
margin-top: 0 !important;
}
/* Prevent layout shifts in header area */
.k-appbar {
min-height: 60px !important;
height: 60px !important;
will-change: auto;
contain: layout;
}
/* Ensure header container has stable dimensions */
div[style*="display: flex"][style*="alignItems: center"] {
min-height: 200px !important;
contain: layout;
transform: translateZ(0); /* Force hardware acceleration */
margin-top: 10px;
}
/* Target the AppBar container specifically */
div[style*="display: flex"][style*="alignItems: center"][style*="height: auto"] {
margin-top: 10px !important;
}
/* Prevent breadcrumb layout shifts */
.k-breadcrumb {
min-height: 20px !important;
will-change: auto;
}
/* Ensure PanelBar has consistent positioning */
.k-panelbar {
position: relative !important;
will-change: auto;
}
/* Fix login form layout shifts */
.k-input {
width: 300px !important;
max-width: 300px !important;
min-width: 300px !important;
contain: layout;
}
/* Ensure login form container has stable dimensions */
form {
width: 350px !important;
margin: 0 auto !important;
contain: layout;
}

View File

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

Before

Width:  |  Height:  |  Size: 4.3 KiB

After

Width:  |  Height:  |  Size: 4.3 KiB

View File

Before

Width:  |  Height:  |  Size: 1.9 KiB

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

Before

Width:  |  Height:  |  Size: 4.0 KiB

After

Width:  |  Height:  |  Size: 4.0 KiB

View File

@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="#f5f1e9">
<text x="12" y="16" text-anchor="middle" font-family="Arial, sans-serif" font-size="10" font-weight="bold" fill="#f5f1e9">DLS</text>
</svg>

After

Width:  |  Height:  |  Size: 241 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.0 MiB

View File

@@ -1,7 +1,11 @@
// AppBar.jsx
import React from 'react';
import { AppBar, AppBarSection, AppBarSpacer } from '@progress/kendo-react-layout';
import { Button } from '@progress/kendo-react-buttons';
import { Avatar } from '@progress/kendo-react-layout';
import { Link, useNavigate } from 'react-router-dom';
import Logo from '../assets/images/campfire_logs_square_logo_bg_match.png';
import CampfireBreadcrumb from './UI/Breadcrumb';
const CampfireAppBar = ({ isLoggedIn, onLogin, onDrawerToggle }) => {
const navigate = useNavigate();
@@ -12,16 +16,29 @@ const CampfireAppBar = ({ isLoggedIn, onLogin, onDrawerToggle }) => {
};
return (
<div>
<h1 style={{ color: "#ff5733", textAlign: "center" }}>Campfire Logs</h1>
<AppBar position="sticky" style={{ backgroundColor: "#edbd7d"}}>
<div style={{ display: 'flex', alignItems: 'center', height: 'auto', marginTop: '10px' }}>
<img
src={Logo}
alt="Campfire Logs Logo"
loading="eager"
fetchPriority="high"
style={{
width: "200px",
height: "200px",
cursor: "pointer",
borderRadius: "50%",
border: "2px solid #edbd7d",
objectFit: "cover",
position: "relative",
zIndex: 2
}}
onClick={() => navigate('/dashboard')}
/>
<AppBar position="sticky" style={{ backgroundColor: "#242424", display: 'flex', justifyContent: 'space-between', flex: 1, marginLeft: "-100px", minHeight: '60px' }}>
<AppBarSection>
<CampfireBreadcrumb />
</AppBarSection>
<AppBarSection>
<Button look="flat" onClick={onDrawerToggle}>
<span className="k-icon k-i-menu" />
</Button>
<AppBarSpacer style={{ width: 800 }} />
<Link to="/dashboard">
<Button look="flat">Dashboard</Button>
</Link>
@@ -31,11 +48,9 @@ const CampfireAppBar = ({ isLoggedIn, onLogin, onDrawerToggle }) => {
<Link to="/editor">
<Button look="flat">+ New Post</Button>
</Link>
</AppBarSection>
<AppBarSpacer style={{width: 50 }} />
<AppBarSpacer style={{ width: 20 }} />
<AppBarSection>
{isLoggedIn ? (
<Button look="flat" onClick={handleLogout}>
Logout
@@ -45,6 +60,7 @@ const CampfireAppBar = ({ isLoggedIn, onLogin, onDrawerToggle }) => {
<Button look="flat">Login</Button>
</Link>
)}
</AppBarSection>
</AppBar>
</div>

View File

@@ -11,25 +11,20 @@ const PostCard = ({ post, onEdit }) => {
const formatDate = (utcString) => {
if (!utcString) return '';
// 1. Create Date object. It parses the UTC string automatically.
const date = new Date(utcString);
// 2. Define options for the desired output format (local time zone)
const dateOptions = {
year: 'numeric',
month: 'short', // e.g., 'Sep'
month: 'short',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true, // e.g., '9:51 PM'
timeZoneName: undefined // Removes the timezone name from the output
hour12: true,
timeZoneName: undefined
};
// 3. Convert to local, human-readable string and clean up formatting
// The space between date and time may vary by locale, so we use string replacement for consistency.
const formattedDate = date.toLocaleDateString(undefined, dateOptions);
// This attempts to convert the format '9/20/2025, 9:51 PM' to 'Sep 20, 2025, 9:51 PM'
const parts = formattedDate.split(',');
if (parts.length > 1) {
const timePart = date.toLocaleTimeString(undefined, { hour: 'numeric', minute: '2-digit', hour12: true });
@@ -41,7 +36,7 @@ const PostCard = ({ post, onEdit }) => {
};
return (
<Card key={post.slug} style={{ width: '250px', textAlign: 'center', display: 'flex' }}>
<Card key={post.slug} style={{ width: 'calc(100% - 50px)', textAlign: 'center', display: 'flex' }}>
{/* 1. Thumbnail Image */}
<CardImage

View File

@@ -0,0 +1,68 @@
// MarkdownEditor.jsx
// KendoReact Splitter implementation for dual-pane markdown editing with live preview
// Demonstrates advanced layout components and real-time markdown processing
import React, { useState } from 'react';
import { Splitter, SplitterPane } from '@progress/kendo-react-layout';
import { marked } from 'marked';
function MarkdownEditor({ markdownContent, onMarkdownChange, onSplitterChange, panes }) {
return (
// KendoReact Splitter for dual-pane markdown editing with live preview
<Splitter
style={{ height: '575px', width: '100%' }}
panes={panes}
onChange={onSplitterChange}
>
<SplitterPane>
<div style={{ padding: '10px' }}>
<h4 style={{ margin: '0 0 10px 0' }}>Markdown Editor</h4>
<textarea
value={markdownContent}
onChange={onMarkdownChange}
style={{
width: '100%',
height: '500px',
fontFamily: 'Arial, sans-serif',
fontSize: '14px',
lineHeight: '1.5',
backgroundColor: '#f5f1e9',
color: '#3d3d3d',
resize: 'none',
border: '1px solid #e0e0e0',
borderRadius: '8px',
padding: '10px',
outline: 'none',
marginBottom: '10px'
}}
placeholder="Enter markdown content..."
/>
</div>
</SplitterPane>
<SplitterPane>
<div style={{ padding: '10px' }}>
<h4 style={{ margin: '0 0 10px 0' }}>Live Preview</h4>
<div
style={{
height: '500px',
overflow: 'auto',
border: '1px solid #e0e0e0',
borderRadius: '8px',
padding: '10px',
fontFamily: 'Arial, sans-serif',
fontSize: '14px',
lineHeight: '1.5',
backgroundColor: '#f5f1e9',
color: '#3d3d3d',
marginBottom: '10px'
}}
dangerouslySetInnerHTML={{
__html: marked(markdownContent)
}}
/>
</div>
</SplitterPane>
</Splitter>
);
}
export default MarkdownEditor;

View File

@@ -0,0 +1,71 @@
// MetadataEditor.jsx
import React, { useState } from 'react';
import { Input } from '@progress/kendo-react-inputs';
import { Label } from '@progress/kendo-react-labels';
import { Button } from '@progress/kendo-react-buttons';
function MetadataEditor({ postData }) {
const [title, setTitle] = useState(postData?.title || '');
const [tags, setTags] = useState(postData?.tags?.join(', ') || '');
const [headerImage, setHeaderImage] = useState(postData?.header?.image || '');
const handleTitleChange = (e) => {
setTitle(e.target.value);
};
const handleTagsChange = (e) => {
setTags(e.target.value);
};
const handleHeaderImageChange = (e) => {
const file = e.target.files[0];
if (file) {
console.log('Header image uploaded:', file);
}
};
return (
<div style={{ padding: '20px', borderBottom: '1px solid #e0e0e0' }}>
<div style={{ display: 'flex', gap: '20px', marginBottom: '15px', alignItems: 'center' }}>
<div>
<input
type="file"
accept=".webp,.png,.jpg,.jpeg,.avif,.svg"
onChange={handleHeaderImageChange}
style={{ display: 'none' }}
id="header-image-upload"
/>
<Button
onClick={() => document.getElementById('header-image-upload').click()}
>
Upload Header Image
</Button>
</div>
<div style={{ flex: 1, display: 'flex', alignItems: 'center', gap: '10px' }}>
<Label style={{ minWidth: '60px' }}>Title</Label>
<Input
value={title}
onChange={handleTitleChange}
placeholder="Enter post title"
style={{ flex: 1 }}
/>
</div>
<div style={{ flex: 1, display: 'flex', alignItems: 'center', gap: '10px' }}>
<Label style={{ minWidth: '60px' }}>Tags</Label>
<Input
value={tags}
onChange={handleTagsChange}
placeholder="Enter tags (comma separated)"
style={{ flex: 1 }}
/>
</div>
</div>
</div>
);
}
export default MetadataEditor;

View File

@@ -1,44 +1,91 @@
import {
RichTextEditorComponent,
Inject,
Toolbar,
HtmlEditor,
MarkdownEditor,
SourceCode,
Link, Image
} from '@syncfusion/ej2-react-richtexteditor';
import * as React from 'react';
// WysiwygEditor.jsx
// KendoReact Editor implementation for HTML editing with custom toolbar
// Demonstrates custom tool integration and advanced editor configuration
import React, { useState } from 'react';
import { Editor, EditorTools, EditorToolsSettings } from '@progress/kendo-react-editor';
import InlineCodeTool from './custom/InlineCodeTool';
function CampfireToggleEditor(props) {
const toolbarOptions = {
items: [
'Bold', 'Italic', 'Underline', '|',
'Formats', 'Alignments', 'OrderedList', 'UnorderedList', '|',
'CreateLink', 'Image', '|',
'SourceCode',
'Undo', 'Redo'
]
const {
Bold, Italic, Underline, Strikethrough,
Subscript, Superscript,
CleanFormatting,
Indent, Outdent,
OrderedList, UnorderedList,
NumberedList,
BulletedList,
Undo, Redo,
Link, Unlink, InsertImage, ViewHtml,
InsertFile,
SelectAll,
Print,
Pdf,
} = EditorTools;
// Custom inline code tool for the editor toolbar
const inlineCode = InlineCodeTool;
function WysiwygEditor({ content, onContentChange }) {
// KendoReact Editor change handler
const handleEditorChange = (event) => {
onContentChange(event.html);
};
return (
<RichTextEditorComponent
value={props.value}
change={handleEditorChange}
editorMode={'Html'}
toolbarSettings={toolbarOptions}
>
<Inject
services={[
Toolbar,
Link,
Image,
HtmlEditor,
MarkdownEditor,
SourceCode
]}
/>
</RichTextEditorComponent>
);
};
export default CampfireToggleEditor;
// KendoReact Editor toolbar configuration with custom inline code tool
const tools = [
[Bold, Italic, Underline, Strikethrough],
[Subscript, Superscript],
[inlineCode], // Custom tool for inline code formatting
[CleanFormatting],
[Indent, Outdent],
[OrderedList, UnorderedList],
[NumberedList, BulletedList],
[SelectAll],
[Undo, Redo],
[Link, Unlink, InsertImage, InsertFile, ViewHtml]
];
return (
<div>
<style>
{`
.k-editor-content a {
color: var(--kendo-color-primary, #d94f27) !important;
text-decoration: underline;
}
.k-editor-content a:hover {
color: var(--kendo-color-primary-hover, #ff6f48) !important;
text-decoration: underline;
}
.k-editor-content a:visited {
color: var(--kendo-color-primary, #d94f27) !important;
}
.k-textarea {
background-color: #f5f1e9 !important;
}
`}
</style>
{/* KendoReact Editor with custom toolbar and styling */}
<div style={{ width: '1000px' }}>
<Editor
value={content}
onChange={handleEditorChange}
tools={tools}
style={{
height: '400px',
width: '1000px'
}}
contentStyle={{
fontFamily: 'Arial, sans-serif',
fontSize: '14px',
lineHeight: '1.5',
backgroundColor: '#f5f1e9'
}}
className="custom-editor"
/>
</div>
</div>
);
}
export default WysiwygEditor;

View File

@@ -0,0 +1,26 @@
// EditorModeToggle.jsx
// Custom KendoReact component for switching between HTML and Markdown editing modes
// Demonstrates reusable UI components and state management patterns
import React from 'react';
import { Button } from '@progress/kendo-react-buttons';
function EditorModeToggle({ editMode, onToggle }) {
return (
// Toggle button positioned above the editor with consistent styling
<div style={{
display: 'flex',
justifyContent: 'flex-end',
marginBottom: '8px'
}}>
<Button
onClick={onToggle}
size="small"
icon={editMode === 'html' ? 'code' : 'edit'}
>
{editMode === 'html' ? 'Switch to Markdown' : 'Switch to HTML'}
</Button>
</div>
);
}
export default EditorModeToggle;

View File

@@ -0,0 +1,49 @@
// InlineCodeTool.jsx
// Custom KendoReact Editor tool for inline code formatting
import React from 'react';
import { Button } from '@progress/kendo-react-buttons';
import { SvgIcon } from '@progress/kendo-react-common';
import { codeSnippetIcon } from '@progress/kendo-svg-icons';
const InlineCodeTool = (props) => {
const { view } = props;
const handleClick = () => {
if (view) {
// Access ProseMirror editor state and dispatch
const { state, dispatch } = view;
const markType = state.schema.marks.code;
const { from, to } = state.selection;
if (markType) {
const tr = state.tr;
// Check if selected text already has code mark
const hasMark = state.doc.rangeHasMark(from, to, markType);
if (hasMark) {
// Remove code mark if already present
tr.removeMark(from, to, markType);
} else {
// Add code mark to selected text
tr.addMark(from, to, markType.create());
}
// Dispatch transaction to apply changes
dispatch(tr);
}
}
};
return (
// KendoReact Button with SVG icon
<Button
onClick={handleClick}
title="Inline Code"
size="small"
>
<SvgIcon icon={codeSnippetIcon} />
</Button>
);
};
export default InlineCodeTool;

View File

@@ -1,3 +1,4 @@
// LoginComponent.jsx
import React, { useState } from 'react';
import { Input } from '@progress/kendo-react-inputs';
import { Label } from '@progress/kendo-react-labels';
@@ -28,18 +29,20 @@ const LoginComponent = ({ onLogin }) => {
};
return (
<>
<form onSubmit={handleSubmit}>
<div>
<div style={{ marginBottom: '100px', width: '350px', margin: '0 auto 100px auto' }}>
<form onSubmit={handleSubmit} style={{ width: '350px', contain: 'layout' }}>
<div style={{ textAlign: 'center' }}>
<Label htmlFor="username">Username: </Label>
<Input style={{ border: '1px solid #edbd7d' }} type="text" id="username" value={username} onChange={(e) => setUsername(e.target.value)} autoComplete="username" />
<Input style={{ border: '1px solid #edbd7d', width: '300px', maxWidth: '300px', minWidth: '300px' }} type="text" id="username" value={username} onChange={(e) => setUsername(e.target.value)} autoComplete="username" />
</div>
<div>
<div style={{ textAlign: 'center' }}>
<Label htmlFor="password">Password: </Label>
<Input style={{ border: '1px solid #edbd7d', marginBottom: '10px' }} type="password" id="password" value={password} onChange={(e) => setPassword(e.target.value)} autoComplete="current-password" />
<Input style={{ border: '1px solid #edbd7d', marginBottom: '10px', width: '300px', maxWidth: '300px', minWidth: '300px' }} type="password" id="password" value={password} onChange={(e) => setPassword(e.target.value)} autoComplete="current-password" />
</div>
<div style={{ textAlign: 'center' }}>
<Button look="flat" type="submit" style={{ padding: '0 20px' }}>Login</Button>
</div>
<div>
<NotificationGroup style={{ textAlign: 'center' }}>
{error && (<Notification type={{ style: 'error', icon: true }} closeable={true} onClose={() => setError('')}>
@@ -48,7 +51,7 @@ const LoginComponent = ({ onLogin }) => {
</NotificationGroup>
</div>
</form>
</>
</div>
);
};

View File

@@ -1,3 +1,4 @@
// PanelBar.jsx
import React from 'react';
import { PanelBar, PanelBarItem } from '@progress/kendo-react-layout';
import { useNavigate } from 'react-router-dom';
@@ -9,10 +10,25 @@ import {
} from '@progress/kendo-svg-icons';
import { panelbarData } from '../data/panelbar-data';
// Import custom icon images (PNG files)
import GiteaIcon from '../assets/icons/Gitea_Logo.png';
import NotionIcon from '../assets/icons/Notion-logo.png';
import HashnodeIcon from '../assets/icons/Hashnode_icon.png';
import DevIcon from '../assets/icons/DEV_Community_Badge.png';
import VeniceIcon from '../assets/icons/venice_icon.png';
import DlseitzIcon from '../assets/icons/dlseitz-icon.svg';
const iconMap = {
bookIcon, inboxIcon, trackChangesIcon, plusOutlineIcon,
globeOutlineIcon, linkIcon, tellAFriendIcon,
facebookIcon, xLogoIcon, linkedinIcon, redditIcon
facebookIcon, xLogoIcon, linkedinIcon, redditIcon,
// Custom SVG icons
giteaIcon: GiteaIcon,
notionIcon: NotionIcon,
hashnodeIcon: HashnodeIcon,
devIcon: DevIcon,
veniceIcon: VeniceIcon,
dlseitzIcon: DlseitzIcon
};
const CampfirePanelBar = ({ isExpanded = true }) => {
@@ -38,11 +54,24 @@ const CampfirePanelBar = ({ isExpanded = true }) => {
textDecoration: 'none'
}}
>
{iconMap[item.icon] && typeof iconMap[item.icon] === 'string' ? (
<img
src={iconMap[item.icon]}
alt={item.title}
style={{
width: '16px',
height: '16px',
marginLeft: '30px',
objectFit: 'contain'
}}
/>
) : (
<SvgIcon
icon={iconMap[item.icon]}
size="medium"
style={{ marginLeft: '30px' }}
/>
)}
<span style={{ marginLeft: '8px' }}>{item.title}</span>
</a>
}
@@ -77,7 +106,7 @@ const CampfirePanelBar = ({ isExpanded = true }) => {
);
}
// Parent items with children
// Headers with children
return (
<PanelBarItem
key={item.title}
@@ -102,7 +131,8 @@ const CampfirePanelBar = ({ isExpanded = true }) => {
<div style={{
width: isExpanded ? 300 : 200,
minWidth: isExpanded ? 240 : 60,
transition: 'width 0.3s'
transition: 'width 0.3s',
marginTop: '-10px',
}}>
<PanelBar>
{panelbarData.map(renderItem)}

View File

@@ -1,135 +0,0 @@
import React from 'react';
import { Drawer, DrawerContent } from '@progress/kendo-react-layout';
import { useNavigate, useLocation } from 'react-router-dom';
import { SvgIcon } from '@progress/kendo-react-common';
import { bookIcon, inboxIcon, trackChangesIcon, plusOutlineIcon, globeOutlineIcon, linkIcon, tellAFriendIcon, facebookIcon, xLogoIcon, linkedinIcon, redditIcon } from '@progress/kendo-svg-icons';
class ErrorBoundary extends React.Component {
constructor(props) {
super(props);
this.state = { hasError: false };
}
static getDerivedStateFromError(error) {
return { hasError: true };
}
componentDidCatch(error, errorInfo) {
console.error("ErrorBoundary caught an error", error, errorInfo);
}
render() {
if (this.state.hasError) {
return <h1>Something went wrong.</h1>;
}
return this.props.children;
}
}
// Lookup table translating the string icon names used in drawerItems
// into the actual Kendo SVG icon objects imported above.
const iconMap = {
  // Navigation / content icons
  bookIcon, inboxIcon, trackChangesIcon, plusOutlineIcon,
  // Link / section-header icons
  globeOutlineIcon, linkIcon, tellAFriendIcon,
  // Social icons
  facebookIcon, xLogoIcon, linkedinIcon, redditIcon
};
const SidebarDrawer = ({ children, isExpanded, onDrawerToggle, isLoggedIn }) => {
const navigate = useNavigate();
const location = useLocation();
const drawerItems = [
{ text: 'Content Summary', icon: 'bookIcon', route: '/dashboard' },
{ separator: true },
{ text: 'Published Posts', icon: 'inboxIcon', route: '/posts' },
{ text: 'Drafts', icon: 'trackChangesIcon', route: '/posts' },
{ text: 'New Post', icon: 'plusOutlineIcon', route: '/editor' },
{ separator: true },
{ text: 'External Links', icon: 'globeOutlineIcon', route: null },
{ separator: true },
{ text: 'dlseitz.dev', icon: 'linkIcon', route: 'https://dlseitz.dev', parent: 'External Links' },
{ text: 'Gitea', icon: 'linkIcon', route: 'https://gitea.dlseitz.dev', parent: 'External Links' },
{ text: 'Notion', icon: 'linkIcon', route: 'https://www.notion.so', parent: 'External Links' },
{ text: 'Hashnode', icon: 'linkIcon', route: 'https://hashnode.com', parent: 'External Links' },
{ text: 'DEV.to', icon: 'linkIcon', route: 'https://dev.to', parent: 'External Links' },
{ text: 'Venice.ai', icon: 'linkIcon', route: 'https://venice.ai', parent: 'External Links' },
{ separator: true },
{ text: 'Social', icon: 'tellAFriendIcon', route: null },
{ separator: true },
{ text: 'FaceBook', icon: 'facebookIcon', route: 'https://facebook.com', parent: 'Social' },
{ text: 'X', icon: 'xLogoIcon', route: 'https://x.com', parent: 'Social' },
{ text: 'LinkedIn', icon: 'linkedinIcon', route: 'https://linkedin.com', parent: 'Social' },
{ text: 'Reddit', icon: 'redditIcon', route: 'https://reddit.com', parent: 'Social' }
];
console.log('Drawer Items:', drawerItems);
const drawerItemRender = (props) => {
console.log('itemRender called with props:', props);
const { item } = props;
const isSelected = item.route && item.route === location.pathname;
console.log('Rendering item:', item);
console.log('Icon Component:', item.icon);
console.log('Route:', item.route);
if (item.separator) {
return <li className="k-drawer-separator" />;
}
if (item.route === null) {
return (
<li className="k-drawer-item k-text-primary">
<SvgIcon icon={iconMap[item.icon]} size="medium" />
<span style={{ marginLeft: '10px', fontSize: '0.9em', fontWeight: 'bold' }}>{item.text}</span>
</li>
);
}
if (item.route.startsWith('http')) {
return (
<li className="k-drawer-item">
<a href={item.route} target="_blank" rel="noopener noreferrer" className="k-drawer-link">
<SvgIcon icon={iconMap[item.icon]} />
<span style={{ marginLeft: '10px' }}>{item.text}</span>
</a>
</li>
);
}
return (
<li className={`k-drawer-item ${isSelected ? 'k-selected' : ''}`} onClick={() => navigate(item.route)}>
<span className="k-drawer-link">
<SvgIcon icon={iconMap[item.icon]} />
<span style={{ marginLeft: '10px' }}>{item.text}</span>
</span>
</li>
);
};
return (
<ErrorBoundary>
<Drawer
expanded={isExpanded}
mode="push"
mini={false}
position="start"
items={drawerItems.map(item => ({ ...item, selected: item.route === location.pathname }))}
itemRender={drawerItemRender}
>
<DrawerContent>
{children}
</DrawerContent>
</Drawer>
</ErrorBoundary>
);
};
export default SidebarDrawer;

View File

@@ -0,0 +1,75 @@
// Breadcrumb.jsx
import React from 'react';
import { Breadcrumb } from '@progress/kendo-react-layout';
import { useLocation } from 'react-router-dom';
const CampfireBreadcrumb = () => {
const location = useLocation();
const getBreadcrumbs = () => {
const path = location.pathname;
const pathSegments = path.split('/').filter(segment => segment);
if (path === '/dashboard' || path === '/') {
return [{ id: 'dashboard', text: 'Dashboard' }];
}
if (path === '/posts') {
return [
{ id: 'dashboard', text: 'Dashboard' },
{ id: 'posts', text: 'Published Posts' }
];
}
if (path === '/drafts') {
return [
{ id: 'dashboard', text: 'Dashboard' },
{ id: 'drafts', text: 'Drafts' }
];
}
if (path === '/editor') {
return [
{ id: 'editor', text: 'Editor' },
{ id: 'new-post', text: 'New Post' }
];
}
if (path.startsWith('/editor/')) {
const slug = pathSegments[1];
return [
{ id: 'editor', text: 'Editor' },
{ id: 'edit-post', text: 'Edit Post' } // Placeholder for actual title
];
}
return [{ id: 'dashboard', text: 'Dashboard' }];
};
const breadcrumbData = getBreadcrumbs();
console.log('Breadcrumb data:', breadcrumbData);
return (
<div style={{
marginLeft: '120px',
color: '#d94f27',
fontSize: '14px',
lineHeight: '1.2',
backgroundColor: 'transparent'
}}>
<style>
{`
.k-breadcrumb {
background-color: transparent !important;
}
.k-breadcrumb .k-breadcrumb-item {
background-color: transparent !important;
}
`}
</style>
<Breadcrumb data={breadcrumbData} />
</div>
);
};
export default CampfireBreadcrumb;

View File

@@ -10,10 +10,16 @@ export default function Copyright() {
}}
>
<p>
&copy; 2025{" | "}Derek L. Seitz{" | "}
<a href="https://dlseitz.dev" target="_blank" rel="noopener noreferrer" className="k-link">
&copy; 2025 Derek L. Seitz{" | "}
<a href="https://dlseitz.dev" target="_blank" rel="noopener noreferrer" style={{
color: '#d94f27',
textDecoration: 'none',
transition: 'color 0.2s ease'
}}
onMouseEnter={(e) => e.target.style.color = '#ff6f48'}
onMouseLeave={(e) => e.target.style.color = '#d94f27'}>
dlseitz.dev
</a>
</a>{" | "}All Rights Reserved
</p>
</div>
);

View File

@@ -11,10 +11,10 @@ header:
attribution: 'Photo by <a href="https://unsplash.com/@kemaldgn?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Kemal Berkay Dogan</a> on <a href="https://unsplash.com/photos/a-campfire-with-a-cup-of-coffee-sitting-in-front-of-it-TcUN5sDZPZ8?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Unsplash</a>'
---
Hey everyone! Welcome to my blog, **_Campfire Logs: The Art of Trial & Error_**. The idea behind this blog is to create a space where I can be completely transparent in my journey as a full-stack developer. We'll dive into the projects I'm building from the ground upfrom websites and backend systems to full-stack applicationsand I'll share the wins, the roadblocks, and, most importantly, the catastrophic moments when I inevitably break something (maybe intentionally, but probably not). I'll discuss the challenges I encounter, the rationale behind my design choices, and the lessons learned along the way.
Hey everyone! Welcome to my blog, ***Campfire Logs: The Art of Trial & Error***. The idea behind this blog is to create a space where I can be completely transparent in my journey as a full-stack developer. We'll dive into the projects I'm building from the ground upfrom websites and backend systems to full-stack applicationsand I'll share the wins, the roadblocks, and, most importantly, the catastrophic moments when I inevitably break something (maybe intentionally, but probably not). I'll discuss the challenges I encounter, the rationale behind my design choices, and the lessons learned along the way.
Let me go ahead and say that blogging is something I havent done since the old days of Xanga and Myspace (Im only 38, calm down now). Having just spent four years writing academic papers, my wording may at times come across as a bit dry and dense, but Im working on it. My journey to becoming a developer has been a bit unconventional, and Im a firm believer in building things the hard way to truly understand how they work. That's why I've taken on the challenge of building a full digital ecosystem from scratch for my brand, [dlseitz.dev](https://dlseitz.dev). From the website to the demo sites, the backend systems and a self-hosted Gitea server, this blog is where I'll log that entire process.
Let me go ahead and say that blogging is something I havent done since the old days of Xanga and Myspace (Im only 38, calm down now). Having just spent four years writing academic papers, my wording may at times come across as a bit dry and dense, but Im working on it. My journey to becoming a developer has been a bit unconventional, and Im a firm believer in building things the hard way to truly understand how they work. That's why I've taken on the challenge of building a full digital ecosystem from scratch for my brand, [dlseitz.dev](https://dlseitz.dev). From the website to the demo sites, the backend systems and a self-hosted Gitea server, this blog is where I'll log that entire process.
The first full blog post is coming this Wednesday, Aug 27 2025, so be sure to check it out. I will be sharing how the domain-level migration of my Gitea server goes. Im excited to be pulling it into the ecosystem I mentioned above. Fingers crossed that it goes smoothly because I always get super nervous when I start changing up config files. Debugging silly typos in them has caused me so much frustration in the past. Heres to hoping Ive learned from those past mistakes and that this goes off without (much of) a hitch!
The first full blog post is coming this Wednesday, Aug 27 2025, so be sure to check it out. I will be sharing how the domain-level migration of my Gitea server goes. Im excited to be pulling it into the ecosystem I mentioned above. Fingers crossed that it goes smoothly because I always get super nervous when I start changing up config files. Debugging silly typos in them has caused me so much frustration in the past. Heres to hoping Ive learned from those past mistakes and that this goes off without (much of) a hitch!
Thanks for stopping by!

View File

@@ -10,53 +10,53 @@ header:
attribution: 'Photo by <a href="https://unsplash.com/@lc_photography?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Leon Contreras</a> on <a href="https://unsplash.com/photos/selective-focus-photography-of-marshmallows-on-fire-pit-YndHL7gQIJE?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Unsplash</a>'
---
Hey everyone! Welcome to my first _full_ blog post for **_Campfire Logs: The Art of Trial & Error_**. My initial post, [#0 - Setting Up Camp: A Backstory](https://hashnode.com/post/cmeqsflzr002h02jrgpsrdcxe), introduced me a little and talked some about my goals and vision for this blog. Ill admit that when I began thinking about the direction I wanted to take this, I was worried that I wouldnt be able to come up with engaging content consistently (and maybe I havent yet).
Hey everyone! Welcome to my first *full* blog post for ***Campfire Logs: The Art of Trial & Error***. My initial post, [#0 - Setting Up Camp: A Backstory](https://hashnode.com/post/cmeqsflzr002h02jrgpsrdcxe), introduced me a little and talked some about my goals and vision for this blog. Ill admit that when I began thinking about the direction I wanted to take this, I was worried that I wouldnt be able to come up with engaging content consistently (and maybe I havent yet).
I quickly realized, though, that topics to write about can and will just fall into your lap very easily and repeatedly. This can be especially true when learning to use different tools and techniques with your existing workflowor if youre like me, you look at something and think, Sure! How hard could it possibly be?
I quickly realized, though, that topics to write about can and will just fall into your lap very easily and repeatedly. This can be especially true when learning to use different tools and techniques with your existing workflowor if youre like me, you look at something and think, Sure! How hard could it possibly be?
**The Universe to me** : Well Im glad you asked
**The Universe to me**: Well Im glad you asked…”
Fair warning: this post is longer than my usual campfire chats will beits a full story with a few twists. So you may want to grab a coffee (or some marshmallows and a stick), and if youre short on time, Ive included a [TL;DR](#tldr) section just in case. Lets get to it!
Fair warning: this post is longer than my usual campfire chats will beits a full story with a few twists. So you may want to grab a coffee (or some marshmallows and a stick), and if youre short on time, Ive included a [TL;DR](#tldr) section just in case. Lets get to it!
## **The First Server of My Very Own**
Around the end of February, 2025, I spent a lot of time exploring the free-tier offerings of [AWS](https://aws.com) and its services ([EC2](https://aws.amazon.com/ec2/), [RDS](https://aws.amazon.com/rds/), [S3](https://aws.amazon.com/s3/), etc.). I still had just under 6 months of school left, but I was applying to tech-job after tech-job anyway because I knew it was (and still is) a long road. At the time, being 37 and still in college, transitioning from the food- and public-service industries into Tech was (and still is) pretty intimidating and overwhelming. So I was trying hard to find something (anything) that might help make me look attractive to potential employers.
At the same time, I was learning to use [Git](https://git-scm.com/) more consistently, and because of my natural tendency to look for alternatives to the mainstream options (yeah, Im rebellious like that), I stumbled upon [Gitea](https://about.gitea.com). For those who arent familiar with Gitea, its an open-source, self-hosted Git service that has a similar feel and functionality to [GitHub](https://github.com/). If youre privacy-conscious or simply just want better control over your projects and codebase, its a great alternative to the more popular Git services available. But I digress
At the same time, I was learning to use [Git](https://git-scm.com/) more consistently, and because of my natural tendency to look for alternatives to the mainstream options (yeah, Im rebellious like that), I stumbled upon [Gitea](https://about.gitea.com). For those who arent familiar with Gitea, its an open-source, self-hosted Git service that has a similar feel and functionality to [GitHub](https://github.com/). If youre privacy-conscious or simply just want better control over your projects and codebase, its a great alternative to the more popular Git services available. But I digress
Thinking that a self-hosted Gitea server could be a step toward standing out more and because I was already becoming more familiar with cloud infrastructure, I started [Googling](https://google.com/) and [ChatGPT-ing](https://chatgpt.com/) where to start. It turns out that setting up the server really wasnt all that difficult, but that doesnt mean I was confident building it out.
Thinking that a self-hosted Gitea server could be a step toward standing out more and because I was already becoming more familiar with cloud infrastructure, I started [Googling](https://google.com/) and [ChatGPT-ing](https://chatgpt.com/) where to start. It turns out that setting up the server really wasnt all that difficult, but that doesnt mean I was confident building it out.
Long story short, I registered a cheap domain with [Porkbun.com](https://porkbun.com/), installed Gitea on a free-tier EC2 instance, pointed new domains [A Records](https://www.cloudflare.com/learning/dns/dns-records/dns-a-record/) to the IP address I reserved for the server, and set up a [reverse proxy](https://www.cloudflare.com/learning/cdn/glossary/reverse-proxy/) using [NGINX](https://nginx.org/). I wont lieI felt like a million bucks because I accomplished something through my own undertaking that, up until that point, I had only studied about in school.
Long story short, I registered a cheap domain with [Porkbun.com](https://porkbun.com/), installed Gitea on a free-tier EC2 instance, pointed new domains [A Records](https://www.cloudflare.com/learning/dns/dns-records/dns-a-record/) to the IP address I reserved for the server, and set up a [reverse proxy](https://www.cloudflare.com/learning/cdn/glossary/reverse-proxy/) using [NGINX](https://nginx.org/). I wont lieI felt like a million bucks because I accomplished something through my own undertaking that, up until that point, I had only studied about in school.
I played around with it for a couple of weeks, and then, just like you did with that new toy you got for Christmas when you were 10, I didnt touch it again for months.
I played around with it for a couple of weeks, and then, just like you did with that new toy you got for Christmas when you were 10, I didnt touch it again for months.
## **The Ecosystem**
## **The Ecosystem**
Fast-forward to the near-present, and with my bachelors degree finally in hand (exactly twenty years after high school), all I have to show for a nine-month-long job search is an inbox full of rejection emails. Got it. A degree isnt enough to get a job anymore. Thats not really how they market this at universities these days, but something will come along. No rush, right?
Fast-forward to the near-present, and with my bachelors degree finally in hand (exactly twenty years after high school), all I have to show for a nine-month-long job search is an inbox full of rejection emails. Got it. A degree isnt enough to get a job anymore. Thats not really how they market this at universities these days, but something will come along. No rush, right?
Well, not exactly. You see, I cant just sit still. I have to keep moving, to keep doing, to keep building something I decided that I needed to build my own experience toyou guessed ittry and make myself more attractive to potential employers AND _potential clients_. How, you ask?
Well, not exactly. You see, I cant just sit still. I have to keep moving, to keep doing, to keep building something I decided that I needed to build my own experience toyou guessed ittry and make myself more attractive to potential employers AND *potential clients*. How, you ask?
Well Im glad you asked
Well Im glad you asked
Being the resourceful person that I amor being a MacGyver of my generation, ratherI can be pretty good at doing the most while having very little to work with. So what did I have to work with? A laptop with [VS Code](https://code.visualstudio.com/), an internet connection, and a free domain through [Name.com](https://name.com)s partnership with [GitHub Educations Student Developer Pack](https://education.github.com/pack) Id find a way to make it work. Why not pull my long-forgotten Gitea server out of the back of the closet too? The [EC2](https://aws.amazon.com/ec2/) instance was still running, and I still had that domain for a few months longer. This was shaping up better than I thought already!
Being the resourceful person that I amor being a MacGyver of my generation, ratherI can be pretty good at doing the most while having very little to work with. So what did I have to work with? A laptop with [VS Code](https://code.visualstudio.com/), an internet connection, and a free domain through [Name.com](https://name.com)s partnership with [GitHub Educations Student Developer Pack](https://education.github.com/pack) Id find a way to make it work. Why not pull my long-forgotten Gitea server out of the back of the closet too? The [EC2](https://aws.amazon.com/ec2/) instance was still running, and I still had that domain for a few months longer. This was shaping up better than I thought already!
Cue the registration of [dlseitz.dev](https://dlseitz.dev), a sort of live portfolio that I could expand on as I go. What better way to show I can build something with modern tools and the skills I just spent four years developing, right? It didnt take long, though, before I realized just a website wasnt going to be enough.
Cue the registration of [dlseitz.dev](https://dlseitz.dev), a sort of live portfolio that I could expand on as I go. What better way to show I can build something with modern tools and the skills I just spent four years developing, right? It didnt take long, though, before I realized just a website wasnt going to be enough.
Over the next few weeks, I decided to explore what a static site generator (SSG), particularly [11ty](https://www.11ty.dev), could do to improve my website coding. From there, I decided I needed a backend application built on [Node.js](https://nodejs.org/) with [Express](https://expressjs.com/) to securely process the contact form on the site, store it in a [PostgreSQL](https://www.postgresql.org) database, and email me the users inquiry. I also needed to come up with a way to present live demo sites, each being designed with specific business needs in mind, in a way that wouldnt complicate the website itself. Hosting them as subdomains of my site was my answer. From there, I wanted a way to share my experiences with others (my work is only possible by building from the work others put in first), but I want the solution to be my own (Im really into self-hosting if you cant tell by now). With that still being in the planning and discovery phase, I also wanted to be able to chronicle the journey of building it as I went (thanks [Hashnode](https://hashnode.com)!).
Over the next few weeks, I decided to explore what a static site generator (SSG), particularly [11ty](https://www.11ty.dev), could do to improve my website coding. From there, I decided I needed a backend application built on [Node.js](https://nodejs.org/) with [Express](https://expressjs.com/) to securely process the contact form on the site, store it in a [PostgreSQL](https://www.postgresql.org) database, and email me the users inquiry. I also needed to come up with a way to present live demo sites, each being designed with specific business needs in mind, in a way that wouldnt complicate the website itself. Hosting them as subdomains of my site was my answer. From there, I wanted a way to share my experiences with others (my work is only possible by building from the work others put in first), but I want the solution to be my own (Im really into self-hosting if you cant tell by now). With that still being in the planning and discovery phase, I also wanted to be able to chronicle the journey of building it as I went (thanks [Hashnode](https://hashnode.com)!).
So clearly, in the natural progression of things, and with my Gitea servers domain expiring in a few short months, I decided this was the perfect time for pulling the server into the ecosystem by migrating it to its own dlseitz.dev subdomain.
So clearly, in the natural progression of things, and with my Gitea servers domain expiring in a few short months, I decided this was the perfect time for pulling the server into the ecosystem by migrating it to its own dlseitz.dev subdomain.
Now, with all of this going on, you may be wondering if Im only saying things at this point just to say them, but Im not. I guess Ive finally started to lean into a passion for developing (we already established that Im a late bloomer). Right now, at this point at least, I _dont_ feel like Ive bitten off more than I can chew, but dont we all feel that way right before we _really_ get going on a project?
Now, with all of this going on, you may be wondering if Im only saying things at this point just to say them, but Im not. I guess Ive finally started to lean into a passion for developing (we already established that Im a late bloomer). Right now, at this point at least, I *dont* feel like Ive bitten off more than I can chew, but dont we all feel that way right before we *really* get going on a project?
## **Domain-Level Migration Sounds kind of scary, huh?**
## **Domain-Level Migration’… Sounds kind of scary, huh?**
### **What a Domain-Level Migration Really Means**
So what exactly is a domain-level migration? Its a pretty broad term, but simply put, its moving something like a website, a web app, or even an entire Active Directory from one domain to another. This can range in scope from physically migrating from one infrastructure to another to simply changing which domains DNS records point to a particular server where a website or web app is hosted, and of course, everything in between.
So what exactly is a domain-level migration? Its a pretty broad term, but simply put, its moving something like a website, a web app, or even an entire Active Directory from one domain to another. This can range in scope from physically migrating from one infrastructure to another to simply changing which domains DNS records point to a particular server where a website or web app is hosted, and of course, everything in between.
### **Planning the Changes**
When performing a migration of any kind, you should always start by writing (or typing, if you prefer) a well-informed, step-by-step action plan, including contingencies for any points of failure that you can identify. Having a Plan C or D can often be just as important as Plan A is. I cant stress that enough. For my migration, I had a few options to consider, and to be honest, Im not really sure if Im happy with the route I chose, at least as a long-term solution. That doesnt mean I chose the wrong option, but it can end up affecting my other projects down the road.
When performing a migration of any kind, you should always start by writing (or typing, if you prefer) a well-informed, step-by-step action plan, including contingencies for any points of failure that you can identify. Having a Plan C or D can often be just as important as Plan A is. I cant stress that enough. For my migration, I had a few options to consider, and to be honest, Im not really sure if Im happy with the route I chose, at least as a long-term solution. That doesnt mean I chose the wrong option, but it can end up affecting my other projects down the road.
### **Choosing My Migration Path**
@@ -66,40 +66,40 @@ With a simple plan for this migration (an in-place transition) now in place I wa
### **Step One: Update DNS Records**
First, I had to update the DNS records for the new subdomain. Normally I would have done this through my domain registrar (Name.com for this particular domain), but I had just swapped to letting [Cloudflare](https://www.cloudflare.com/) manage my DNS for the domain and subdomains because they will automatically renew your [SSL](https://aws.amazon.com/what-is/ssl-certificate/) certificates from [Lets Encrypt](https://letsencrypt.org/) every 90 days. Once I configured the new A Record (DNS) to point my subdomain to the static IP address of my EC2 instance, it didnt take long to propagate. I wont lie--I was a little sad to have to say goodbye to the old domain. It was a bittersweet moment (Im not crying youre crying).
First, I had to update the DNS records for the new subdomain. Normally I would have done this through my domain registrar (Name.com for this particular domain), but I had just swapped to letting [Cloudflare](https://www.cloudflare.com/) manage my DNS for the domain and subdomains because they will automatically renew your [SSL](https://aws.amazon.com/what-is/ssl-certificate/) certificates from [Lets Encrypt](https://letsencrypt.org/) every 90 days. Once I configured the new A Record (DNS) to point my subdomain to the static IP address of my EC2 instance, it didnt take long to propagate. I wont lie--I was a little sad to have to say goodbye to the old domain. It was a bittersweet moment (Im not crying youre crying).
### **Step Two: Hunting Down Config Files**
My next step was to stop the Gitea process running on the server, and then get ready to make some edits to some config files. It was about now that I remembered I never got around to documenting where I installed everything on my EC2 instance back in February. Of course, me being me, I didn't use their default locations, either, so I had to go on a digital scavenger hunt to find the app.ini and the correct NGINX server block. After a good bit of searching, I finally found them (and documented their whereabouts in my dedicated [Notion](https://www.notion.so/) dashboard: Dereks Dev Infrabase catchy, right?). Now for the nerve-racking part.
My next step was to stop the Gitea process running on the server, and then get ready to make some edits to some config files. It was about now that I remembered I never got around to documenting where I installed everything on my EC2 instance back in February. Of course, me being me, I didn't use their default locations, either, so I had to go on a digital scavenger hunt to find the app.ini and the correct NGINX server block. After a good bit of searching, I finally found them (and documented their whereabouts in my dedicated [Notion](https://www.notion.so/) dashboard: Dereks Dev Infrabase catchy, right?). Now for the nerve-racking part.
I've broken a system or two by messing up a config file, so even something as small as a single out-of-place comma or semicolon can cause a total catastrophe. With that in mind, I quite anxiously tiptoed through the configuration files, changing only the settings necessary so that Gitea would recognize its new home. To my dismay, this part actually went off without a hitch. You can imagine the relief.
### **Step Three: SSL Certificate Troubles**
The real test came when I tried to download and install my Lets Encrypt [wildcard SSL certificate](https://www.digicert.com/faq/public-trust-and-certificates/what-is-a-wildcard-certificate) to secure the new [gitea.dlseitz.dev](http://gitea.dlseitz.dev) subdomain. The dominoes started to fall immediately. First, trying to install [Certbot](https://certbot.eff.org/) and the [certbot-dns-cloudflare plugin](https://certbot-dns-cloudflare.readthedocs.io/) resulted in a Python versioning issue. I had to install [pip3](https://pypi.org/project/pip/) on its own because it didnt install with Python. But even after that, getting the plugin to work was like trying to drop a toddler off at daycareSO MANY DEPENDENCY ISSUES.
The real test came when I tried to download and install my Lets Encrypt [wildcard SSL certificate](https://www.digicert.com/faq/public-trust-and-certificates/what-is-a-wildcard-certificate) to secure the new [gitea.dlseitz.dev](http://gitea.dlseitz.dev) subdomain. The dominoes started to fall immediately. First, trying to install [Certbot](https://certbot.eff.org/) and the [certbot-dns-cloudflare plugin](https://certbot-dns-cloudflare.readthedocs.io/) resulted in a Python versioning issue. I had to install [pip3](https://pypi.org/project/pip/) on its own because it didnt install with Python. But even after that, getting the plugin to work was like trying to drop a toddler off at daycareSO MANY DEPENDENCY ISSUES.
To resolve this, I had to install [snapd](https://snapcraft.io/docs/installing-snapd), a separate package manager for Linux, to correctly install the plugin. Once all of that was done, I realized the config file with my Cloudflare API token had a small syntax error that was keeping Certbot from obtaining my SSL/TLS certificate. When I realized what the issue was, it was an easy thing to fix.
### **Final Hiccup: SSH Blocked by Cloudflare**
With the certificate installed and the Gitea process restarted, I was very happy that the updated domain directed as expected. I wasnt quite finished yet, though. I tried to push test commits from my local repos to the remote server, but it wouldn't work. I kept getting a rather nerve-racking error that used alarming terms like Fatal, access rights, and make sure the repository exists. After a few failed attempts and a good bit of head scratching and grounding exercises (therapy finally came in handy), I finally asked [Gemini](https://gemini.google.com/) (Googles generative AI model), explaining what I had just done with the server. It quickly told me the likely reason I couldnt push was because I was doing so using [SSH](https://www.cloudflare.com/learning/access-management/what-is-ssh/), and that Cloudflare blocks SSH connections on port 22 on all proxied subdomains for security reasons.
With the certificate installed and the Gitea process restarted, I was very happy that the updated domain directed as expected. I wasnt quite finished yet, though. I tried to push test commits from my local repos to the remote server, but it wouldn't work. I kept getting a rather nerve-racking error that used alarming terms like Fatal,” “access rights, and make sure the repository exists. After a few failed attempts and a good bit of head scratching and grounding exercises (therapy finally came in handy), I finally asked [Gemini](https://gemini.google.com/) (Googles generative AI model), explaining what I had just done with the server. It quickly told me the likely reason I couldnt push was because I was doing so using [SSH](https://www.cloudflare.com/learning/access-management/what-is-ssh/), and that Cloudflare blocks SSH connections on port 22 on all proxied subdomains for security reasons.
The solution was literally as simple as flipping a switch. I just had to turn off Cloudflare's proxy for my Gitea subdomain. And with two clicks, it was done.
### **Made It Out Alive**
All in all, the migration really wasnt that problematic. A few frustrating hiccups happened, but a good rule of thumb is to always expect that something will stray from even the most well-laid out plan.
All in all, the migration really wasnt that problematic. A few frustrating hiccups happened, but a good rule of thumb is to always expect that something will stray from even the most well-laid out plan.
**_The end._**
***The end.***
## TL;DR
I migrated my old self-hosted Gitea server onto my [dlseitz.dev](http://dlseitz.dev) subdomain. DNS went smooth, configs werent too scary, SSL certs gave me grief, and Cloudflare blocked my SSH until I flipped a switch. Lessons learned: always document installs, expect dependency hell, and dont panic when you see fatal in error logs.
I migrated my old self-hosted Gitea server onto my [dlseitz.dev](http://dlseitz.dev) subdomain. DNS went smooth, configs werent too scary, SSL certs gave me grief, and Cloudflare blocked my SSH until I flipped a switch. Lessons learned: always document installs, expect dependency hell, and dont panic when you see fatal in error logs.
## **Before You Go**
I want to say thanks again to everyone for reading this. I appreciate you sticking around for this campfire story. I know it is a little on the hefty side, but I really hope it wasnt too much of a snooze-fest to you.
I want to say thanks again to everyone for reading this. I appreciate you sticking around for this campfire story. I know it is a little on the hefty side, but I really hope it wasnt too much of a snooze-fest to you.
I encourage you to tell me what you thought about the article (what worked for you, what didnt) in the comments. Or perhaps you have some suggestions on how I could have handled the migration betterlet me know that, too. Im always looking to learn and improve.
I encourage you to tell me what you thought about the article (what worked for you, what didnt) in the comments. Or perhaps you have some suggestions on how I could have handled the migration betterlet me know that, too. Im always looking to learn and improve.
Also, be sure to check back soon for the next installment. I will be talking about how easy it can be to overlook a needed component of a project, what that can mean down the road, and you can bet that I have a story to go along with it!

View File

@@ -10,68 +10,68 @@ header:
attribution: 'Image generated with Sora. | © 2025 Derek L. Seitz'
---
Hey there everyone! Welcome to **_Campfire Logs: The Art of Trial & Error._** In my last log, "[#1 - The Great Gitea Migration](https://hashnode.com/post/cmeuf5cdo000o02l8cem786pt)," I introduced you to my self-hosted [Gitea](https://about.gitea.com/) and shared the nerves, hiccups, and (eventual) success of migrating it into my [dlseitz.dev](https://dlseitz.dev) ecosystem.
Hey there everyone! Welcome to ***Campfire Logs: The Art of Trial & Error.*** In my last log, "[#1 - The Great Gitea Migration](https://hashnode.com/post/cmeuf5cdo000o02l8cem786pt)," I introduced you to my self-hosted [Gitea](https://about.gitea.com/) and shared the nerves, hiccups, and (eventual) success of migrating it into my [dlseitz.dev](https://dlseitz.dev) ecosystem.
Today I want to tell you about a recent blunder of mine (you guessed it, a forgotten privacy policy), and my experience trying to retrofit a solution into my existing codebase (ever tried adding a new field to your form JSON after youve already wired everything up? Yeah). Ill also talk a little bit about my philosophy on mistakes like this and explore ways to help prevent them in the future (spoiler alert: theres no absolutely foolproof way).
Today I want to tell you about a recent blunder of mine (you guessed it, a forgotten privacy policy), and my experience trying to retrofit a solution into my existing codebase (ever tried adding a new field to your form JSON after youve already wired everything up? Yeah). Ill also talk a little bit about my philosophy on mistakes like this and explore ways to help prevent them in the future (spoiler alert: theres no absolutely foolproof way).
Let me say real quick that this log will not be as lengthy as my last, but you can still grab a coffee or some marshmallows and a stick as we get started. Theres also a **_TL;DR_** section at the bottom of the page for anyone in a hurry.
Let me say real quick that this log will not be as lengthy as my last, but you can still grab a coffee or some marshmallows and a stick as we get started. Theres also a ***TL;DR*** section at the bottom of the page for anyone in a hurry.
Lets get to it!
Lets get to it!
## **How Did I Miss That?**
Its true. I forgot to add a privacy policy to my website before it went live. And while that may be a very small thing to overlook in the grand scheme of thingsespecially for a solo developerbeing transparent with how you handle clients information can really help to build trust.
Its true. I forgot to add a privacy policy to my website before it went live. And while that may be a very small thing to overlook in the grand scheme of thingsespecially for a solo developerbeing transparent with how you handle clients information can really help to build trust.
Now, its pretty safe to say that the vast majority of us are guilty of clicking the little checkbox saying we agree to the privacy policy and terms & conditions _without actually reading over them_. There are even studies published that back this up. In 2019, [Pew Research Center found that only about 1 in 5 Americans](https://www.pewresearch.org/internet/2019/11/15/americans-and-privacy-concerned-confused-and-feeling-lack-of-control-over-their-personal-information/?utm_source=chatgpt.com#:~:text=How%20Americans%20handle%20privacy%20policies%3A) actually read privacy policies before agreeing to them, and most of those said they dont really understand the laws surrounding them. Ill let you dive into all of that on your own, but the takeaway here is pretty clear: most people simply dont engage with privacy policies. Not in any meaningful way, at least.
Now, its pretty safe to say that the vast majority of us are guilty of clicking the little checkbox saying we agree to the privacy policy and terms & conditions *without actually reading over them*. There are even studies published that back this up. In 2019, [Pew Research Center found that only about 1 in 5 Americans](https://www.pewresearch.org/internet/2019/11/15/americans-and-privacy-concerned-confused-and-feeling-lack-of-control-over-their-personal-information/?utm_source=chatgpt.com#:~:text=How%20Americans%20handle%20privacy%20policies%3A) actually read privacy policies before agreeing to them, and most of those said they dont really understand the laws surrounding them. Ill let you dive into all of that on your own, but the takeaway here is pretty clear: most people simply dont engage with privacy policies. Not in any meaningful way, at least.
So why did it matter _to me?_ Why was I a little shocked that I didnt think about it sooner?
So why did it matter *to me?* Why was I a little shocked that I didnt think about it sooner?
Because integrity matters, and two of my core values are centered around **_transparency_** and **_privacy_**. Now, Im not saying Im perfect or better than anyone (far from it). What I am saying is that I make genuine effort to stay true to the things I value most, making sure the effort is clear through my work.
Because integrity matters, and two of my core values are centered around ***transparency*** and ***privacy***. Now, Im not saying Im perfect or better than anyone (far from it). What I am saying is that I make genuine effort to stay true to the things I value most, making sure the effort is clear through my work.
[So, I decided to add a privacy policy.](https://dlseitz.dev/privacy-policy/)
## **The Developers Burden**
## **The Developers Burden**
This is where the actual fun started: coding the retrofit. No, reallyit was fun (at first). The goal was to ensure that a user consented to my websites privacy policy before they could submit their info using the contact form. However, I really dont know why I was so dead set on a checkbox being the mechanism for it. I guess my subconscious knew it was the hard way since there are _much_ simpler ways to accomplish this.
This is where the actual fun started: coding the retrofit. No, reallyit was fun (at first). The goal was to ensure that a user consented to my websites privacy policy before they could submit their info using the contact form. However, I really dont know why I was so dead set on a checkbox being the mechanism for it. I guess my subconscious knew it was the hard way since there are *much* simpler ways to accomplish this.
When I wrote out my course of action, I started with the front end, then the backend app, and finally the database, so this was the order I worked in. In hindsight, I think that the reversed order would have saved me a bunch of headache in the long run (sort of like stringing lights on a Christmas tree). I dont think that I did it in the _wrong_ order, just in an order that introduced more opportunity for bugs.
When I wrote out my course of action, I started with the front end, then the backend app, and finally the database, so this was the order I worked in. In hindsight, I think that the reversed order would have saved me a bunch of headache in the long run (sort of like stringing lights on a Christmas tree). I dont think that I did it in the *wrong* order, just in an order that introduced more opportunity for bugs.
The front end part of this endeavor went pretty smoothly. I started by creating a new [Nunjucks](https://mozilla.github.io/nunjucks/) template for the privacy policy webpage (I use the [static site generator (SSG)](https://www.cloudflare.com/learning/performance/static-site-generator/) [11ty (Eleventy)](https://11ty.dev) to build out my site). Next, I created a new stylesheet for the page-specific CSS rules. As weird as this may sound, having separate stylesheets for different concerns does it for the neat freak inside of meI completely lit up with joy when I first realized that was even a thing (that part seemed to have been left out in school). Then, the last thing before moving on to the JavaScript was to add the policy page to my footer nav links.
The front end part of this endeavor went pretty smoothly. I started by creating a new [Nunjucks](https://mozilla.github.io/nunjucks/) template for the privacy policy webpage (I use the [static site generator (SSG)](https://www.cloudflare.com/learning/performance/static-site-generator/) [11ty (Eleventy)](https://11ty.dev) to build out my site). Next, I created a new stylesheet for the page-specific CSS rules. As weird as this may sound, having separate stylesheets for different concerns does it for the neat freak inside of meI completely lit up with joy when I first realized that was even a thing (that part seemed to have been left out in school). Then, the last thing before moving on to the JavaScript was to add the policy page to my footer nav links.
So far, so good.
### Where Things Start Getting Slippery
Now, my backend application isnt really all that complicated. Its a simple [Front Controller](https://www.geeksforgeeks.org/system-design/front-controller-design-pattern/) with [Chain of Responsibility](https://medium.com/@artemkhrenov/the-chain-of-responsibility-pattern-in-javascript-building-elegant-request-handlers-b9a72a16d7cc) [design pattern](https://sourcemaking.com/design_patterns) written on [Node.js](https://nodejs.org) with [Express](https://expressjs.com). It consists of the main app that receives the forms [JSON payload](https://www.ibm.com/docs/en/baw/24.0.x?topic=protocol-common-json-payload-cases-case-types#:~:text=The%20workflow%20REST%20protocol%20defines,following%20workflow%20REST%20protocol%20resources:) through a [POST request](https://www.w3schools.com/tags/ref_httpmethods.asp) to its [API](https://www.ibm.com/think/topics/api), then three modules that handle the rest. The [router module](https://developer.mozilla.org/en-US/docs/Learn_web_development/Extensions/Server-side/Express_Nodejs/routes) transports the payload through the appropriate logic, the [middleware module](https://www.w3schools.com/nodejs/nodejs_middleware.asp) runs security checks (spam, bots, etc.), and the [controller module](https://developer.mozilla.org/en-US/docs/Learn_web_development/Extensions/Server-side/Express_Nodejs/routes) formats and stores it in my [PostgreSQL](https://www.postgresql.org/) database before emailing it to me. The backend work for the retrofit was only to make sure the router expected the added checkbox [Boolean](https://developer.mozilla.org/en-US/docs/Glossary/Boolean) in the payload, the controller knew where to insert it and a timestamp into the database, and the database had a place for the new data to go.
Now, my backend application isnt really all that complicated. Its a simple [Front Controller](https://www.geeksforgeeks.org/system-design/front-controller-design-pattern/) with [Chain of Responsibility](https://medium.com/@artemkhrenov/the-chain-of-responsibility-pattern-in-javascript-building-elegant-request-handlers-b9a72a16d7cc) [design pattern](https://sourcemaking.com/design_patterns) written on [Node.js](https://nodejs.org) with [Express](https://expressjs.com). It consists of the main app that receives the forms [JSON payload](https://www.ibm.com/docs/en/baw/24.0.x?topic=protocol-common-json-payload-cases-case-types#:~:text=The%20workflow%20REST%20protocol%20defines,following%20workflow%20REST%20protocol%20resources:) through a [POST request](https://www.w3schools.com/tags/ref_httpmethods.asp) to its [API](https://www.ibm.com/think/topics/api), then three modules that handle the rest. The [router module](https://developer.mozilla.org/en-US/docs/Learn_web_development/Extensions/Server-side/Express_Nodejs/routes) transports the payload through the appropriate logic, the [middleware module](https://www.w3schools.com/nodejs/nodejs_middleware.asp) runs security checks (spam, bots, etc.), and the [controller module](https://developer.mozilla.org/en-US/docs/Learn_web_development/Extensions/Server-side/Express_Nodejs/routes) formats and stores it in my [PostgreSQL](https://www.postgresql.org/) database before emailing it to me. The backend work for the retrofit was only to make sure the router expected the added checkbox [Boolean](https://developer.mozilla.org/en-US/docs/Glossary/Boolean) in the payload, the controller knew where to insert it and a timestamp into the database, and the database had a place for the new data to go.
Simple.
After wiring everything upthe HTML, CSS, vanilla JS, database fields, router, and controllerI figured my form was ready to test. But thats when the headaches started. Submitting dummy info kept throwing me into a loop where I couldnt tell if the culprit was the checkboxs [`required` attribute](https://developer.mozilla.org/en-US/docs/Web/CSS/:required) or my custom JS validation. Alright, let me just crack my knuckles and pop my neck Ive got this.
After wiring everything upthe HTML, CSS, vanilla JS, database fields, router, and controllerI figured my form was ready to test. But thats when the headaches started. Submitting dummy info kept throwing me into a loop where I couldnt tell if the culprit was the checkboxs [`required` attribute](https://developer.mozilla.org/en-US/docs/Web/CSS/:required) or my custom JS validation. Alright, let me just crack my knuckles and pop my neck Ive got this.
Even after a hard refresh ( **Ctrl+Shift+R** ) finally cleared the caching issues, I was immediately hit with a new problem: [_500 Internal Server Error_](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Status/500). But hey, at least now my form payload was making it to the server—progress nonetheless! [PM2](https://pm2.keymetrics.io/) logs insisted my timestamp column didn’t exist in the table (even though it did). I tore through my controller logic and even rebuilt the table from scratch (thinking it may be a lower-level bug way above my pay grade), but nothing changed. Frustrated, I gave up for the night. It was late, and I clearly didn’t have this, at least not yet.
Even after a hard refresh (**Ctrl+Shift+R**) finally cleared the caching issues, I was immediately hit with a new problem: [*500 Internal Server Error*](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Status/500). But hey, at least now my form payload was making it to the server—progress nonetheless! [PM2](https://pm2.keymetrics.io/) logs insisted my timestamp column didn’t exist in the table (even though it did). I tore through my controller logic and even rebuilt the table from scratch (thinking it may be a lower-level bug way above my pay grade), but nothing changed. Frustrated, I gave up for the night. It was late, and I clearly didn’t have this, at least not yet.
### A Hard Refresh Makes Things Click
The next day, after a hard refresh of my own, I decided to sidestep the checkbox entirely by replacing it with a consent by affirmative action clause above the submit buttonshort, sweet, and to the point. I thought for sure this solved everythingbut nope. I think sleeping on the problem was the real hero here, because when I looked over the controller logic again, the issue was embarrassingly obvious: I had set the column type in the database table to `timestamp`, but was also trying to manually insert a timestamp value via [`NOW()`](https://www.postgresql.org/docs/current/functions-datetime.html) in the controller. Since the database automatically assigns a timestamp when a new record is created, my insert kept failing. Simply letting the database do database things made all the difference in the world.
The next day, after a hard refresh of my own, I decided to sidestep the checkbox entirely by replacing it with a consent by affirmative action clause above the submit buttonshort, sweet, and to the point. I thought for sure this solved everythingbut nope. I think sleeping on the problem was the real hero here, because when I looked over the controller logic again, the issue was embarrassingly obvious: I had set the column type in the database table to `timestamp`, but was also trying to manually insert a timestamp value via [`NOW()`](https://www.postgresql.org/docs/current/functions-datetime.html) in the controller. Since the database automatically assigns a timestamp when a new record is created, my insert kept failing. Simply letting the database do database things made all the difference in the world.
At this point, though, the checkbox was already long gone. And honestly, I wasnt even slightly upset about it. It meant one less hurdle for potential clients trying to reach out and one less datapoint to validate and store. Sometimes, simplicity really is the best policy.
At this point, though, the checkbox was already long gone. And honestly, I wasnt even slightly upset about it. It meant one less hurdle for potential clients trying to reach out and one less datapoint to validate and store. Sometimes, simplicity really is the best policy.
## Forgive Your Mistakes: Do Better Next Time
Heres the thing about mistakes: theyre not signs you dont belong theyre signs youre doing the work. Every overlooked field, every broken query, every duh moment is just part of the feedback loop. For me, integrity means owning those errors instead of hiding them, and transparency means being willing to talk about them openly (even in public, like this log). [Imposter syndrome](https://www.psychologytoday.com/us/basics/imposter-syndrome) loves to whisper that youll be found out as a fraud and that making mistakes proves youre not cut out for this. In reality, though, mistakes are how you learn and get better. As long as we keep building, keep refining, and keep letting our values guide the process, the mistakes stop being failures and start being steppingstones.
Heres the thing about mistakes: theyre not signs you dont belong theyre signs youre doing the work. Every overlooked field, every broken query, every duh moment is just part of the feedback loop. For me, integrity means owning those errors instead of hiding them, and transparency means being willing to talk about them openly (even in public, like this log). [Imposter syndrome](https://www.psychologytoday.com/us/basics/imposter-syndrome) loves to whisper that youll be found out as a fraud and that making mistakes proves youre not cut out for this. In reality, though, mistakes are how you learn and get better. As long as we keep building, keep refining, and keep letting our values guide the process, the mistakes stop being failures and start being steppingstones.
If theres one thing Ill do differently next time, its slowing down before I overcomplicate the solution. A quick sanity check am I letting the database do its thing, or fighting it?could have saved hours. And honestly, just walking away for the night did more for debugging than any frantic console.log ever could. No process will ever be perfect, but building in those pauses makes the next mistake easier to catch.
If theres one thing Ill do differently next time, its slowing down before I overcomplicate the solution. A quick sanity checkam I letting the database do its thing, or fighting it?’—could have saved hours. And honestly, just walking away for the night did more for debugging than any frantic console.log ever could. No process will ever be perfect, but building in those pauses makes the next mistake easier to catch.
## TL;DR
I launched my site without a privacy policyoops. While most people dont read them, transparency and privacy are core values of mine, so I had to fix it.
I launched my site without a privacy policyoops. While most people dont read them, transparency and privacy are core values of mine, so I had to fix it.
The retrofit was a mix of fun and frustration: I wired up the front end, backend, and database, only to hit errors caused by my own overengineering (checkbox validation, manual timestamp insertion, etc.). After some trial, error, and sleep, I realized simpler was betterso I dropped the checkbox and used a clear consent clause instead and let my database do what databases do: _database_.
The retrofit was a mix of fun and frustration: I wired up the front end, backend, and database, only to hit errors caused by my own overengineering (checkbox validation, manual timestamp insertion, etc.). After some trial, error, and sleep, I realized simpler was betterso I dropped the checkbox and used a clear consent clause instead and let my database do what databases do: *database*.
Lesson learned: mistakes happen, especially when building solo. What matters is staying true to your values, keeping the process transparent, and refining along the way.
## Before You Go
Thanks to everyone for reading! I appreciate you taking the time to learn about my developer experiences. I encourage you all to tell me what you thought about the article (what worked for you, what didnt) in the comments. Or perhaps you have a similar story you may want to share. One persons trip-up is another persons what not to do.
Thanks to everyone for reading! I appreciate you taking the time to learn about my developer experiences. I encourage you all to tell me what you thought about the article (what worked for you, what didnt) in the comments. Or perhaps you have a similar story you may want to share. One persons trip-up is another persons what not to do.
Be sure to check back soon for installment #3 of Campfire Logs: The Art of Trial & Error, where I will be taking a step back from the technical aspects of developing. Weve talked about retrofitting the technical aspects of the privacy policy, but I want to shift gears to discuss how I created my websites privacy policy and the data privacy concerns that came along with it.
Be sure to check back soon for installment #3 of Campfire Logs: The Art of Trial & Error, where I will be taking a step back from the technical aspects of developing. Weve talked about retrofitting the technical aspects of the privacy policy, but I want to shift gears to discuss how I created my websites privacy policy and the data privacy concerns that came along with it.

View File

@@ -11,61 +11,62 @@ header:
---
Hey everyone! Welcome back to [_Campfire Logs: The Art of Trial & Error_](https://campfire.dlseitz.dev). In my last log, [_#2 - Retrofitting the Privacy Policy_](https://hashnode.com/post/cmf30k3cb000102i2civabgh3)_,_ I discussed forgetting to add a privacy policy to my website before it went live, why it was important to me as a solo developer to retrofit it into my contact form, and the issues I ran into during the process. Today, however, I want to shift gears away from the technical issues and dig a little deeper into how I drafted my websites privacy policy and what shaped my approach.
Hey everyone! Welcome back to [*Campfire Logs: The Art of Trial & Error*](https://campfire.dlseitz.dev). In my last log, [*#2 - Retrofitting the Privacy Policy*](https://hashnode.com/post/cmf30k3cb000102i2civabgh3)*,* I discussed forgetting to add a privacy policy to my website before it went live, why it was important to me as a solo developer to retrofit it into my contact form, and the issues I ran into during the process. Today, however, I want to shift gears away from the technical issues and dig a little deeper into how I drafted my websites privacy policy and what shaped my approach.
## **Data Privacy and Why It's Important**
## **Data Privacy and Why Its Important**
Now, somebody is probably thinking, "But Derek, you're just starting out as a freelance developer. You aren't likely to have enough clients for a privacy policy to really matter," and they'd be right (for now). Regardless, I've found that forming good habits early on (when things are slow) can save you from a whole lot of headaches later on. This is no different when it comes to data privacy.
Now, somebody is probably thinking, But Derek, youre just starting out as a freelance developer. You arent likely to have many clients for a privacy policy to really matter, and theyd be right (for now). Regardless, Ive found that forming good habits early on (when things are slow) can save you from a whole lot of headaches later on. This is no different when it comes to data privacy.
The term [_Data Privacy Law_](https://www.osano.com/articles/data-privacy-laws) refers to any legislation that mandates how a business or organization is allowed to collect, use, or store information on the consumers that use their services, as well as the rights consumers have to hold some control over that data. These types of laws are very common, especially in developed countries, but something very important to be aware of is how greatly data privacy laws can vary between jurisdictions. This is true not only from country to country, but even state to state.
The term [*Data Privacy Law*](https://www.osano.com/articles/data-privacy-laws) refers to any legislation that mandates how a business or organization is allowed to collect, use, or store information on the consumers that use their services, as well as the rights consumers have to hold some control over that data. These types of laws are very common, especially in developed countries, but something very important to be aware of is how greatly data privacy laws can vary between jurisdictions. This is true not only from country to country, but even state to state.
In the U.S., for example, while there are some federal and sector-specific laws (like [HIPAA](https://www.hhs.gov/hipaa/index.html) or [COPPA](https://www.ftc.gov/legal-library/browse/rules/childrens-online-privacy-protection-rule-coppa)), the majority of data privacy laws are decided at the state level. This is why Californias data privacy laws, the [California Consumer Privacy Act](https://oag.ca.gov/privacy/ccpa) (CCPA) and the [California Privacy Rights Act](https://thecpra.org/) (CPRA), are the strictest consumer protection laws in the country, whereas states like [Arkansas](https://arkansasag.gov/divisions/public-protection/identity/security-or-data-breach/) and [Mississippi](https://securiti.ai/privacy-laws/us/mississippi/) barely have data privacy laws at all (comparatively).
In the U.S., for example, while there are some federal and sector-specific laws (like [HIPAA](https://www.hhs.gov/hipaa/index.html) or [COPPA](https://www.ftc.gov/legal-library/browse/rules/childrens-online-privacy-protection-rule-coppa)), the majority of data privacy laws are decided at the state level. This is why Californias data privacy laws, the [California Consumer Privacy Act](https://oag.ca.gov/privacy/ccpa) (CCPA) and the [California Privacy Rights Act](https://thecpra.org/) (CPRA), are the strictest consumer protection laws in the country, whereas states like [Arkansas](https://arkansasag.gov/divisions/public-protection/identity/security-or-data-breach/) and [Mississippi](https://securiti.ai/privacy-laws/us/mississippi/) barely have data privacy laws at all (comparatively).
By contrast, things abroad look very different. The European Union's [General Data Protection Regulation](https://gdpr-info.eu/) (GDPR) is (at the time this article was published) the most restrictive set of data privacy laws in the world, with the harshest penalties for violations. Additionally, GDPR isn't solely bound within the EU, but essentially extends to any organization in any country that conducts business with any resident of the EU.
By contrast, things abroad look very different. The European Unions [General Data Protection Regulation](https://gdpr-info.eu/) (GDPR) is (at the time this article was published) the most restrictive set of data privacy laws in the world, with harshest penalties for violations. Additionally, GDPR isnt solely bound within the EU, but essentially extends to any organization in any country that conducts business with any resident of the EU.
Even though data privacy laws often target larger organizations, that doesn't mean smaller businesses or solo developers like me get a free pass. Data privacy applies to everyone handling personal information, and overlooking it (even unintentionally) can create significant legal risks, impact client perception, and erode trust. Bigger organizations and enterprises often have compliance officers and legal teams to make sure these issues don't happen, but that's not the case for the little guys. So how did I choose to tackle this endeavor on my own?
Even though data privacy laws often target larger organizations, that doesnt mean smaller businesses or solo developers like me get a free pass. Data privacy applies to everyone handling personal information, and overlooking it (even unintentionally) can create significant legal risks, impact client perception, and erode trust. Bigger organizations and enterprises often have compliance officers and legal teams to make sure these dont happen, but thats not the case for the little guys. So how did I choose to tackle this endeavor on my own?
I thought you'd never ask!
I thought youd never ask!
## **Your Options as a Solo Dev**
Because I sometimes focus so intensely on getting things right that I end up getting in my own way—especially when it involves something that means a lot to me—this process wasn't as straightforward as I would have preferred. That's not to say it was difficult, but there was a lot to be seriously considered. Legally protecting yourself and those you offer services to isn't something to be taken lightly.
Because I sometimes focus so intensely on getting things right that I end up getting in my own wayespecially when it involves something that means a lot to methis process wasnt as straightforward as I would have preferred. Thats not to say it was difficult, but there was a lot to be seriously considered. Protecting yourself and those you offer services to legally isnt something to be taken lightly.
This was my process.
### **Hire a Pro or DIY?**
Let me say first and foremost that I am not a lawyer, and I cannot give you legal advice. Because data privacy laws can vary considerably depending on location, the safest bet is to have a lawyer or attorney (those terms aren't necessarily interchangeable) draft or help you draft legal documents that can hold up in court. What I can do, however, is suggest how you might find guidance to make an informed decision for your own use case.
Let me say first and foremost that I am not a lawyer, and I cannot give you legal advice. Because data privacy laws can vary considerably depending on location, the safest bet is to have a lawyer or attorney (those terms arent mutually inclusive necessarily) draft or help you draft legal documents that can hold up in court. What I can do, however, is suggest how you might find guidance to make an informed decision for your own use case.
For some, hiring a professional to draft their privacy policy simply isn't an option. The good news is there are alternatives to consider. Online privacy policy generators, like [Termly](https://termly.io) and [FreePrivacyPolicy.com](https://freeprivacypolicy.com), can be used to create generic policies, but it's still a good idea to use them cautiously. The policies they generate are very "cookie-cutter," meaning they are designed to be used by many users and many use cases. In the real world, however, one size does not fit all when it comes to legal documents, so I recommend learning as much about them as you can before committing (still not an option for everyone).
For some, hiring a professional to draft their privacy policy simply isnt an option. The good news is there are alternatives to consider. Online privacy policy generators, like [Termly](https://termly.io) and [FreePrivacyPolicy.com](https://freeprivacypolicy.com), can be used to create generic policies, but its still a good idea to use them cautiously. The policies they generate are very cookie-cutter, meaning they are designed to be used by many users and many use cases. In the real world, however, one size does not fit all when it comes to legal documents, so I recommend learning as much about them as you can before committing (still not an option for everyone).
### **My DIY Approach**
My approach to creating a policy tailored to my specific business needs and values wasn't terribly complicated. I started by generating a policy with both Termly and FreePrivacyPolicy.com, but when I compared them, I was surprised by the differences. FreePrivacyPolicy.com's document covered a wide range of cases, but in a very broad manner (again, cookie-cutter). Termly, on the other hand, was much more detailed, but it included a lot of clauses that didn't apply to my needs. For me, neither of these would do, but now I had a great jumping-off point.
My approach to creating a policy tailored to my specific business needs and values wasnt terribly complicated. I started by generating a policy with both Termly and FreePrivacyPolicy.com, but when I compared them, I was surprised by the differences. FreePrivacyPolicy.coms document covered a wide range of cases, but in a very broad manner (again, cookie-cutter). Termly, on the other hand, was much more detailed, but it included a lot of clauses that didnt apply to my needs. For me, neither of these would do, but now I had a great jumping-off point.
To write my own, I used clear and approachable language, first focusing on explaining five key points:
- **What** - What exact PII my contact form collects (name, organization, email, phone, etc.)
* **What** - What exact PII my contact form collects (name, organization, email, phone, etc.)
- **Why** - Why that information is collected (as a means to respond to inquiries of potential clients)
* **Why** - Why that information is collected (as a means to respond to inquiries of potential clients)
- **How** - How the user's PII is collected (through voluntary submission via the contact form) and how that information would be used (specific ways I would use their PII for providing my services)
* **How** - How the users PII is collected (through voluntary submission via the contact form) and how that information would be used (specific ways I would use their PII for providing my services)
- **How Long** - How long that information will be retained in my database (no longer than 1 year or otherwise required by law)
* **How Long** - How long that information will be retained in my database (no longer than 1 year or otherwise required by law)
* **Where** - Where I can be contacted for change or deletion requests by the person to whom the PII pertains
- **Where** - Where I can be contacted for change or deletion requests by the person to whom the PII pertains
For added transparency, I also added what information I do not collect and addressed common points of concern (cookies, tracking, selling data, etc.). I also provided a high-level description of how I protect collected PII and touched on consumer rights to requesting the correction or deletion of their data and their right to an appeal and an explanation if those requests are rejected.
You can check out my website's policy [here](https://dlseitz.dev/privacy-policy).
You can check out my websites policy [here](https://dlseitz.dev/privacy-policy).
To put it frankly, my goal wasn't just to protect myself legally. I also set out to put potential clients' minds to rest about whether their information would be handled responsibly and with respect.
To put it frankly, my goal wasnt just to protect myself legally. I also set out to put potential clients minds to rest about whether their information would be handled responsibly and with respect.
## TL;DR
In short, data privacy is something to be taken seriously. While most data privacy laws are geared toward larger businesses and organizations, responsibly handling collected consumer information—and being transparent about how that data is used—applies to even a solo dev.
In short, data privacy is something to be taken seriously. While most data privacy laws are geared toward larger businesses and organizations, responsibly handling collected consumer informationand being transparent about how that data is usedapplies to even a solo dev.
If you're considering a privacy policy for your own use case, you have options. Whether you decide to hire a professional, use an online policy generator, or draft it yourself, it's important to understand which option best fits your particular situation, knowing that what may work best for one scenario may not be appropriate for all others. Your safest bet will almost always be to seek the advice of a lawyer or attorney. If you choose otherwise, do your research, make well-informed decisions, and don't make assumptions.
If youre considering a privacy policy for your own use case, you have options. Whether you decide to hire a professional, use an online policy generator, or draft it yourself, its important to understand which option best fits your particular situation, knowing that what may work best for one scenario may not be appropriate for all others. Your safest bet will almost always be to seek the advice of a lawyer or attorney. If you choose otherwise, do your research, make well-informed decisions, and dont make assumptions.
My approach was to draft my own policy tailored to the needs of my specific use case. While doing so, I focused on transparency and honesty to ensure any users of my website and contact form feel safe and respected.
@@ -73,6 +74,4 @@ My approach was to draft my own policy tailored to the needs of my specific use
As always, thank you so much for checking out this Campfire Log! I look forward to hearing your thoughts on what is discussed here, as well as answering any questions you may have. Feel free to leave a comment and help expand the conversation. Perhaps you have a similar experience to share.
Looking ahead to #4, I'll be getting back into the more technical side of things by discussing refactoring to promote modularity and scaling. Be sure to come back and check it out.
]]>
Looking ahead to #4, Ill be getting back into the more technical side of things by discussing refactoring to promote modularity and scaling. Be sure to come back and check it out.

View File

@@ -10,63 +10,64 @@ header:
attribution: 'Image generated with Sora. | © 2025 Derek L. Seitz'
---
Hey everyone! Welcome back to [_Campfire Logs: The Art of Trial & Error_](https://campfire.dlseitz.dev). In my last log, [**_#3 - Data Privacy: Things to Consider_**](https://hashnode.com/post/cmf3futzv000102l4hgh339fv) **_,_** I walked you through how I developed the privacy policy for my website and discussed some of the considerations that developers should make when handling client data. Today, I want to get back to the technical side of things by discussing the [refactor](https://daedtech.com/refactoring-development-technique-not-project/#:~:text=Code%20refactoring%20is%20the%20process%20of%20restructuring%20existing%20computer%20code%20%E2%80%93%20changing%20the%20factoring%20%E2%80%93%20without%20changing%20its%20external%20behavior.), or restructuring of existing code, I recently did on a demo website, including the things I learned along the way.
Hey everyone! Welcome back to [*Campfire Logs: The Art of Trial & Error*](https://campfire.dlseitz.dev). In my last log, [***#3 - Data Privacy: Things to Consider***](https://hashnode.com/post/cmf3futzv000102l4hgh339fv)***,*** I walked you through how I developed the privacy policy for my website and discussed some of the considerations that developers should make when handling client data. Today, I want to get back to the technical side of things by discussing the [refactor](https://daedtech.com/refactoring-development-technique-not-project/#:~:text=Code%20refactoring%20is%20the%20process%20of%20restructuring%20existing%20computer%20code%20%E2%80%93%20changing%20the%20factoring%20%E2%80%93%20without%20changing%20its%20external%20behavior.), or restructuring of existing code, I recently did on a demo website, including the things I learned along the way.
## The Original Demo
About a year ago, I designed and built a demo [static](https://kinsta.com/knowledgebase/what-is-a-static-website/) eCommerce website for a web development course in college. The requirements weren't anything crazy:
About a year ago, I designed and built a demo [static](https://kinsta.com/knowledgebase/what-is-a-static-website/) eCommerce website for a web development course in college. The requirements werent anything crazy:
- choose a scenario from the provided list
* choose a scenario from the provided list
- create four distinct HTML pages
* create four distinct HTML pages
- use a single, external CSS stylesheet
* use a single, external CSS stylesheet
* use JavaScript to create a couple of modals, a shopping cart, and a contact form
- use JavaScript to create a couple of modals, a shopping cart, and a contact form
Easy enough.
But with this having been my first website, and not really having anybody to tell me when to put on the brakes with the creativity (it was an online course), it was the perfect way to trigger a bipolar manic episode. Some may say I went a tad overboard on the project (hence the little "menty b" or two that came along with it), and I wouldn't disagree with them.
But with this having been my first web site, and not really having anybody to tell me when to put on the brakes with the creativity (it was an online course), it was the perfect way to trigger a bipolar manic episode. Some may say I went a tad overboard on the project (hence the little menty b or two that came along with it), and I wouldnt disagree with them.
While this project, in part, [acted as a catalyst for me to resume seeing my therapist](https://www.techuk.org/resource/addressing-mental-health-in-the-tech-industry.html), I have to give it props for firing up my interest in web development (sounds contradictory, but it's true). When I came out on the other side of it, I finally realized, "Hey! I can actually do this!"
While this project, in part, [acted as a catalyst for me to resume seeing my therapist](https://www.techuk.org/resource/addressing-mental-health-in-the-tech-industry.html), I have to give it props for firing up my interest in web development (sounds contradictory, but its true). When I came out on the other side of it, I finally realized Hey! I can actually do this!
So what exactly was the problem with the demo?
## The Problem
The problem with the demo was this: due to the curriculum requirements for the project, along with the fact that it was meant to be an intro-level website, the focus on best practices was more about basic fundamentals than that of the full development lifecycle. [Maintainability, scalability, and the potential for future enhancements](https://www.einfochips.com/blog/importance-of-adopting-best-practices-for-scalable-and-maintainable-systems/) werent considerations during the build. In other words, my codebase became a [tightly-coupled](https://cleancommit.io/blog/whats-the-difference-between-tight-and-loose-coupling/), [monolithic](https://www.ibm.com/think/topics/monolithic-architecture#:~:text=Monolithic%20architecture%20is%20a%20traditional%20software%20development%20model%20in%20which%20a%20single%20codebase%20executes%20multiple%20business%20functions.) behemoth that wasnt easy to read or work with, no matter how well-commented it was. I didnt understand it enough at the time, but just like with backend development, having a [loosely-coupled](https://cleancommit.io/blog/whats-the-difference-between-tight-and-loose-coupling/), [modular](https://www.institutedata.com/us/blog/modularity-in-software-engineering/#:~:text=Modularity%20in%20software%20engineering%20refers%20to%20the%20design%20approach%20that%20emphasizes%20the%20separation%20of%20concerns%2C%20where%20a%20complex%20software%20system%20is%20divided%20into%20smaller%2C%20loosely%20coupled%20modules.) front end should always be the goal.
The problem with the demo was this: due to the curriculum requirements for the project, along with the fact that it was meant to be an intro-level website, the focus on best practices was more about basic fundamentals than that of the full development lifecycle. [Maintainability, scalability, and the potential for future enhancements](https://www.einfochips.com/blog/importance-of-adopting-best-practices-for-scalable-and-maintainable-systems/) werent considerations during the build. In other words, my codebase became a [tightly-coupled](https://cleancommit.io/blog/whats-the-difference-between-tight-and-loose-coupling/), [monolithic](https://www.ibm.com/think/topics/monolithic-architecture#:~:text=Monolithic%20architecture%20is%20a%20traditional%20software%20development%20model%20in%20which%20a%20single%20codebase%20executes%20multiple%20business%20functions.) behemoth that wasnt easy to read or work with, no matter how well-commented it was. I didnt understand it enough at the time, but just like with backend development, having a [loosely-coupled](https://cleancommit.io/blog/whats-the-difference-between-tight-and-loose-coupling/), [modular](https://www.institutedata.com/us/blog/modularity-in-software-engineering/#:~:text=Modularity%20in%20software%20engineering%20refers%20to%20the%20design%20approach%20that%20emphasizes%20the%20separation%20of%20concerns%2C%20where%20a%20complex%20software%20system%20is%20divided%20into%20smaller%2C%20loosely%20coupled%20modules.) front end should always be the goal.
On top of that, I tried my best to take an accessibility-first approach. At the time, I understood the importance of semantic HTML, but I didnt quite grasp that [ARIA attributes](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA) were meant for filling in gaps for [assistive technology (AT)](https://www.atia.org/home/at-resources/what-is-at/) like screen-readers _when using HTML tags without inherent semantic meaning._ This led me to over-apply attributes such as ARIA-label and ARIA-labelledby, which really cluttered up my code. But hey, A for effort, right? Ill talk about this more a little later.
On top of that, I tried my best to take an accessibility-first approach. At the time, I understood the importance of semantic HTML, but I didnt quite grasp that [ARIA attributes](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA) were meant for filling in gaps for [assistive technology (AT)](https://www.atia.org/home/at-resources/what-is-at/) like screen-readers *when using HTML tags without inherent semantic meaning.* This led me to over-apply attributes such as ARIA-label and ARIA-labelledby, which really cluttered up my code. But hey, A for effort, right? Ill talk about this more a little later.
Now, it really isn't uncommon for either of these to be overlooked in foundational academic settings. After all, the focus is often on "making the code work" more so than perfect execution. But that isn't quite the case for professional-grade work (what I'm aiming for). So a few weeks ago, I decided what better way to continue practicing professional development standards than to refactor this demo. I may have slightly underestimated what I had coming…
Now, it really isnt uncommon for either of these to be overlooked in foundational academic settings. After all, the focus is often on making the code work more so than perfect execution. But that isnt quite the case for professional-grade work (what Im aiming for). So a few weeks ago, I decided what better way to continue practicing professional development standards than to refactor this demo. I may have slightly underestimated what I had coming
## Challenging the Notion of Simplicity
I used to associate "simple programs" or "simple design" with having as few files or components as possible. In my early days of coding (not quite the same thing as developing), I thought that meant if an application was contained to a single file, it meant that it was a single object. While in some ways, this isn't necessarily incorrect, I didn't yet understand how I was looking at it from a very abstracted or "zoomed-out" point of view.
I used to associate simple programs or simple design with having as few files or components as possible. In my early days of coding (not quite the same thing as developing), I thought that meant if an application was contained to a single file, it meant that it was a single object. While in some ways, this isnt necessarily incorrect, I didnt yet understand how I was looking at it from a very abstracted or zoomed-out point of view.
I learned, however, to think about the design and architecture of software as a system of interconnected parts. It's much like the human body with its many systems (immune, digestive, etc.) all working together to function as a whole. This helped me to better understand development practices like the [separation of concerns (SoC)](https://www.geeksforgeeks.org/software-engineering/separation-of-concerns-soc/) and writing [DRY (Don't Repeat Yourself)](https://www.digitalocean.com/community/tutorials/what-is-dry-development) code.
I learned, however, to think about the design and architecture of software as a system of interconnected parts. Its much like the human body with its many systems (immune, digestive, etc.) all working together to function as a whole. This helped me to better understand development practices like the [separation of concerns (SoC)](https://www.geeksforgeeks.org/software-engineering/separation-of-concerns-soc/) and writing [DRY (Dont Repeat Yourself)](https://www.digitalocean.com/community/tutorials/what-is-dry-development) code.
With these things in mind, I came up with the idea to refactor the demo, because how hard could it possibly be?
### The Monolith-to-Module Challenge
One of the hardest parts about revisiting old code is trying to make sense of what it's actually doing. You can do an excellent job of commenting each section of code and still end up spending a good amount of time scratching your head while trying to figure out what it all means—and that isn't necessarily a bad thing.
One of the hardest parts about revisiting old code is trying to make sense of what its actually doing. You can do an excellent job of commenting each section of code and still end up spending a good amount of time scratching your head while trying to figure out what it all meansand that isnt necessarily a bad thing.
As I got started on this refactor, I really thought the head-scratching meant that I didn't know as much about web development as I believed I did. However, after finding several snippets of code that could have been better written another way, it didn't take very long for me to realize this wasn't the case. The initial confusion actually signaled I had a deeper understanding and that my approach to development had evolved—very much a good thing, right? I had to think back to that first-time mentality from a year ago so that I could decipher what I had intended the code to do, then translate that functionality into a cleaner implementation.
As I got started on this refactor, I really thought the head-scratching meant that I didnt know as much about web development as I believed I did. However, after finding several snippets of code that could have been better written another way, it didnt take very long for me to realize this wasnt the case. The initial confusion actually signaled I had a deeper understanding and that my approach to development had evolvedvery much a good thing, right? I had to think back to that first-time mentality from a year ago so that I could decipher what I had intended the code to do, then translate that functionality into a cleaner implementation.
Modularizing the HTML files was the easiest part of this whole process, at least for me. As I've mentioned before in some of the other _Campfire Logs_, I use [11ty (Eleventy)](https://11ty.dev), a [static site generator (SSG)](https://www.cloudflare.com/learning/performance/static-site-generator/), along with the templating language [Nunjucks](https://github.com/mozilla/nunjucks), to write modular, easier-to-maintain static websites. It was a breeze breaking the HTML into smaller, reusable components to be injected into a base template of boilerplate HTML using Nunjucks conditional logic (for-loops and if-statements written in Nunjucks syntax). Sure, this broke up four HTML files into several other files, but this didn't make the project more complex, just more organized and DRY.
Modularizing the HTML files was the easiest part of this whole process, at least for me. As Ive mentioned before in some of the other *Campfire Logs*, I use [11ty (Eleventy)](https://11ty.dev), a [static site generator (SSG)](https://www.cloudflare.com/learning/performance/static-site-generator/), along with the templating language [Nunjucks](https://github.com/mozilla/nunjucks), to write modular, easier to maintain static websites. It was a breeze breaking the HTML into smaller, reusable components to be injected into a base template of boilerplate HTML using Nunjucks conditional logic (for-loops and if-statements written in Nunjucks syntax). Sure, this broke up four HTML files into several other files, but this didnt make the project more complex, just more organized and DRY.
### The Cascading Nature of CSS
As I moved on from the HTML-turned-template files, the next logical step was to break up the incredibly long CSS stylesheet. Now remember, per the course project's requirements, I had to use a single stylesheet for all CSS rules. Because of this, the original `styles.css` file ended up being 1,158 lines long. Fortunately, I had grouped rules by the page to which they applied using comments (`/* <comment> */`), and those comments created natural breakpoints when separating the rules into separate stylesheets.
As I moved on from the HTML-turned-template files, the next logical step was to break up the incredibly long CSS stylesheet. Now remember, per the course projects requirements, I had to use a single stylesheet for all CSS rules. Because of this, the original `styles.css` file ended up being 1158 lines long. Fortunately, I had grouped rules by the page to which they applied using comments (`/* <comment> */`), and those comments created natural breakpoints when separating the rules into separate stylesheets.
That sounds like it made this part incredibly easy as well, but it wasn't nearly as easy as you'd imagine. I didn't do the best job at keeping all style rules within their corresponding sections, so once I linked the page-specific stylesheets into the front matter of the corresponding page template and the global stylesheets (for components like the header and footer) into the base layout template, the styling across several pages broke completely.
That sounds like it made this part incredibly easy as well, but it wasnt nearly as easy as youd imagine. I didnt do the best job at keeping all style rules within their corresponding sections, so once I linked the page-specific stylesheets into the front matter of the corresponding page template and the global stylesheets (for components like the header and footer) into the base layout template, the styling across several pages broke completely.
Great! We're having so much fun here! But in all actuality, I needed this. Why, you ask? Well, let's look at an example of the `<head>` section of an HTML document, and I'll explain:
Great! Were having so much fun here! But in all actuality, I needed this. Why, you ask? Well lets look at an example of the `<head>` section of an HTML document, and Ill explain:
![](https://cdn.hashnode.com/res/hashnode/image/upload/v1757634763445/499618b7-0af6-422f-bb3e-6793004330e1.png)
![](https://cdn.hashnode.com/res/hashnode/image/upload/v1757634763445/499618b7-0af6-422f-bb3e-6793004330e1.png align="center")
In the example here, let's say it is that of `contact-page.html`. We can see that the browser will first load `styles.css`, which holds the style rules that are common to every webpage on the site. Then, the browser will load `contact-page.css`, which holds the style rules that are specific only to `contact-page.html`.
In the example here, lets say it is of `contact-page.html`. We can see that the browser will first load `styles.css`, which holds the style rules that are common to every webpage on the site. Then, the browser will load `contact-page.css`, which holds the style rules that are specific only to `contact-page.html`.
The way a browser applies these stylesheets is in the order they are listed, from top to bottom. If both stylesheets contain a style rule for the same element, the rule that is listed last in the document will override the previous one. This is because the latter styles are applied after the former. The browser prioritizes the last rule it reads.
@@ -74,27 +75,28 @@ So with the layouts for most pages across the demo breaking, I realized a few th
1. I did not, in fact, have my style rules as organized as I previously thought
2. Several important base rules must have been overridden somewhere in a later part of the original stylesheet because now they werent being applied as I expected them to be
2. Several important base rules must have been overridden somewhere in a later part of the original stylesheet because now they werent being applied as I expected them to be
3. This would require a methodical, page-by-page approach to untangling the mess I had unknowingly created
While this seems like it would be incredibly frustrating, especially after having just broken up my original `styles.css` file into several purpose-specific stylesheets, it gave me the opportunity (yes, opportunity) to systematically improve my usage of semantic HTML, reduce my overuse of accessibility-focused HTML attributes, and leverage the cascading nature of CSS to improve my design rather than trying to work around it.
### Accessibility & Semantic HTML
I think for many, the term accessibility in the context of web design and development is believed to mostly mean contrast ratios and font sizes. Thats how I thought about it, at least, until I was taught about assistive technologies like keyboard navigation, screen-readers, and speech recognition tools (among others). I knew what screen-readers were, having messed around with them on Windows, Apple, and Android devices, and I knew I could tab through objects on a web page or desktop, but I didnt understand how they worked.
I think for many, the term accessibility in the context of web design and development is believed to mostly mean contrast ratios and font sizes. Thats how I thought about it, at least, until I was taught about assistive technologies like keyboard navigation, screen-readers, and speech recognition tools (among others). I knew what screen-readers were, having messed around with them on Windows, Apple, and Android devices, and I knew I could tab through objects on a web page or desktop, but I didnt understand how they worked.
Many (but not all) HTML element tags have an inherent meaning recognized by assistive technologies. This is what creates structure and logical groupings that enable these technologies to present information in a useful way to individuals using said tech. For example, screen-readers use semantic HTML tags, like `<section>`, `<form>`, hierarchical headings (`<h1>`, `<h2>`, etc.), and others, to navigate documents and read aloud related content in a way that's more easily understandable for individuals with visual impairments.
Many (but not all) HTML element tags have an inherent meaning recognized by assistive technologies. This is what creates structure and logical groupings that enable these technologies to present information in a useful way to individuals using said tech. For example, screen-readers use semantic HTML tags, like `<section>`, `<form>`, hierarchical headings (`<h1>`, `<h2>`, etc.), and others, to navigate documents and read aloud related content in a way that's more easily understandable for individuals with visual impairments.
For tags such as `<div>` and `<span>`, which have no inherent meaning, thats where you would use [ARIA attributes](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA) to give them meaning and a role. But as a rule of thumb, its best to always try and use a semantic HTML tag first and foremost, and only use ARIA as a last resort. This is often referred to as the First Rule of ARIA.
For tags such as `<div>` and `<span>`, which have no inherent meaning, thats where you would use [ARIA attributes](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA) to give them meaning and a role. But as a rule of thumb, its best to always try and use a semantic HTML tag first and foremost, and only use ARIA as a last resort. This is often referred to as the First Rule of ARIA.
When I built the demo for class, I didnt understand accessibility and semantic HTML enough. This led me to incorrectly use semantic tags and add ARIA attributes to tags that simply didnt need them. For assistive technologies, this can cause a lot of confusion in an HTML documents structure, which can lead to bad UX for those reliant on accessibility features. Having to go over the entire HTML and CSS during the refactor meant a chance to truly make the demo accessibility-focused beyond just contrast ratios and font sizes, which was important to me.
When I built the demo for class, I didnt understand accessibility and semantic HTML enough. This led me to incorrectly use semantic tags and add ARIA attributes to tags that simply didnt need them. For assistive technologies, this can cause a lot of confusion in an HTML documents structure, which can lead to bad UX for those reliant on accessibility features. Having to go over the entire HTML and CSS during the refactor meant a chance to truly make the demo accessibility-focused beyond just contrast ratios and font sizes, which was important to me.
## Looking Back
Modularizing and refactoring the demo website also introduced a few minor bugs in the JavaScript that controlled the interactive components on some of the pages, but that meant I had to make sure event listeners grabbed the correct HTML elements from the DOM. Because I thought of JS like I did Java (because it's a programming language, not because of the similarity in their names), separating concerns into different scripts was a little more instinctual for me and was done in the original project, saving me a bit of work.
Modularizing and refactoring the demo website also introduced a few minor bugs in the JavaScript that controlled the interactive components on some of the pages, but that meant I had to make sure event listeners grabbed the correct HTML elements from the DOM. Because I thought of JS like I did Java (because it's a programming language, not because of the similarity in their names), separating concerns into different scripts was a little more instinctual for me and was done in the original project, saving me a bit of work.
While I thought going into the refactor it would be as simple as copying/pasting code into a more modular file structure, Im really glad that I was wrong. Not only did the refactor give me the chance to go back and visit an old project (the nostalgia was real), it really boosted my self-confidence to see where I was a year ago in my development journey versus where I am now. We often dont realize just how much we grow and evolve over a period of time because we get trapped thinking about whats in front of us. As somebody that often questions his own levels of understanding, a project like refactoring an old codebase is a great way to squash feelings of inadequacy and self-doubt. Im really glad I did it.
While I thought going into the refactor it would be as simple as copying/pasting code into a more modular file structure, Im really glad that I was wrong. Not only did the refactor give me the chance to go back and visit an old project (the nostalgia was real), it really boosted my self-confidence to see where I was a year ago in my development journey versus where I am now. We often dont realize just how much we grow and evolve over a period of time because we get trapped thinking about whats in front of us. As somebody that often questions his own levels of understanding, a project like refactoring an old codebase is a great way to squash feelings of inadequacy and self-doubt. Im really glad I did it.
## TL;DR
@@ -102,8 +104,8 @@ I built an intro-level website for a college course a year ago that became a tig
## Before You Go
As always, thank you so much for checking out this _Campfire Log_! Be sure to comment with your own experiences. Maybe you had a similar one to mine, or maybe yours was way different. Id love to hear about the challenges you ran into and how you got around them.
As always, thank you so much for checking out this *Campfire Log*! Be sure to comment with your own experiences. Maybe you had a similar one to mine, or maybe yours was way different. Id love to hear about the challenges you ran into and how you got around them.
The refactored demo is now live. You can check it and its source code out by visiting [https://dlseitz.dev/demos/](https://dlseitz.dev/demos/) and clicking the corresponding button. I recommend doing so using a desktop browser simply because the demo isnt currently mobile-friendly. Making it [responsive for smaller screens](https://developer.mozilla.org/en-US/docs/Learn_web_development/Core/CSS_layout/Responsive_Design), as well as improving the custom alerts and shopping cart feature, is next on my list of things, however. Additional dynamic features have also been added, such as an interactive events page and a dedicated page to attributions for the royalty-free media I used, so you may be interested in seeing how some of those work.
The refactored demo is now live. You can check it and its source code out by visiting [https://dlseitz.dev/demos/](https://dlseitz.dev/demos/) and clicking the corresponding button. I recommend doing so using a desktop browser simply because the demo isnt currently mobile-friendly. Making it [responsive for smaller screens](https://developer.mozilla.org/en-US/docs/Learn_web_development/Core/CSS_layout/Responsive_Design), as well as improving the custom alerts and shopping cart feature, is next on my list of things, however. Additional dynamic features have also been added, such as an interactive events page and a dedicated page to attributions for the royalty-free media I used, so you may be interested in seeing how some of those work.
Looking ahead to #5, Ill be talking more about those expanded features. Ill also explore what it meant to separate the presentation, data, and logic layers of the demo and how that enhanced its modularity, dynamism, and interactivity. Be sure to check back soon!
Looking ahead to #5, Ill be talking more about those expanded features. Ill also explore what it meant to separate the presentation, data, and logic layers of the demo and how that enhanced its modularity, dynamism, and interactivity. Be sure to check back soon!

View File

@@ -3,7 +3,7 @@ title: "#5 - 'The Power of [Separation] Compels You!'"
slug: "5-the-power-of-separation-compels-you"
published: true
date: "2025-09-20 21:51:29 UTC"
tags: ["WebDevelopment", "refactoring", "JavaScript", "apiintegration", "Campfire-Logs"]
tags: ["WebDevelopment", "refactoring", "JavaScript", "api integration", "Campfire-Logs"]
canonical_url: "https://campfire.dlseitz.dev/5-the-power-of-separation-compels-you"
header:
image: "assets/header/5-the-power-of-separation-compels-you.webp"
@@ -11,61 +11,61 @@ header:
contentImagePath: "./assets/#5/"
---
Hey there, and welcome back to **_Campfire Logs: The Art of Trial & Error_**. In my last log, [_#4 - Refactoring a False Sense of Simplicity_](https://hashnode.com/post/cmfg52ttw000002jufwdvdxkj), I introduced you to the [front end](https://www.computerscience.org/bootcamps/resources/frontend-vs-backend/#:~:text=Front%2Dend%20development%20focuses%20on%20the%20user%2Dfacing%20side%20of%20a%20website.%20Front%2Dend%20developers%20ensure%20that%20visitors%20can%20easily%20interact%20with%20and%20navigate%20sites%20by%20using%20programming%20languages%2C%20design%20skills%2C%20and%20other%20tools.%20They%20produce%20the%20drop%2Ddown%20menus%2C%20layouts%2C%20and%20designs%20for%20websites.) demo I recently [refactored](https://daedtech.com/refactoring-development-technique-not-project/#:~:text=Code%20refactoring%20is%20the%20process%20of%20restructuring%20existing%20computer%20code%20%E2%80%93%20changing%20the%20factoring%20%E2%80%93%20without%20changing%20its%20external%20behavior.) to be more modular and accessibility-friendly. Today, we are going to talk more about that same refactor, but we are going to focus on some of the enhancements I made to the existing features and the new features added for improved interactivity. We are also going to discuss how separating the data, presentation, and logic layers to the demo improved maintainability of the codebase by [decoupling](https://blog.covibe.us/the-pitfalls-of-excessive-decoupling-in-software-development-striking-the-right-balance/#:~:text=Decoupling%2C%20in%20software%20development%2C%20refers%20to%20the%20practice%20of%20breaking%20down%20a%20software%20system%20into%20smaller%2C%20independent%20components%20or%20modules.) its components.
Hey there, and welcome back to ***Campfire Logs: The Art of Trial & Error***. In my last log, [*#4 - Refactoring a False Sense of Simplicity*](https://hashnode.com/post/cmfg52ttw000002jufwdvdxkj), I introduced you to the [front end](https://www.computerscience.org/bootcamps/resources/frontend-vs-backend/#:~:text=Front%2Dend%20development%20focuses%20on%20the%20user%2Dfacing%20side%20of%20a%20website.%20Front%2Dend%20developers%20ensure%20that%20visitors%20can%20easily%20interact%20with%20and%20navigate%20sites%20by%20using%20programming%20languages%2C%20design%20skills%2C%20and%20other%20tools.%20They%20produce%20the%20drop%2Ddown%20menus%2C%20layouts%2C%20and%20designs%20for%20websites.) demo I recently [refactored](https://daedtech.com/refactoring-development-technique-not-project/#:~:text=Code%20refactoring%20is%20the%20process%20of%20restructuring%20existing%20computer%20code%20%E2%80%93%20changing%20the%20factoring%20%E2%80%93%20without%20changing%20its%20external%20behavior.) to be more modular and accessibility-friendly. Today, we are going to talk more about that same refactor, but we are going to focus on some of the enhancements I made to the existing features and the new features added for improved interactivity. We are also going to discuss how separating the data, presentation, and logic layers to the demo improved maintainability of the codebase by [decoupling](https://blog.covibe.us/the-pitfalls-of-excessive-decoupling-in-software-development-striking-the-right-balance/#:~:text=Decoupling%2C%20in%20software%20development%2C%20refers%20to%20the%20practice%20of%20breaking%20down%20a%20software%20system%20into%20smaller%2C%20independent%20components%20or%20modules.) its components.
For anyone short on time, or who just wants to get to the point, there's a TL;DR section with links to the live demo and its repo at the end.
For anyone short on time, or who just wants to get to the point, there's a TL;DR section with links to the live demo and its repo at the end.
For everyone else, grab some coffee or marshmallows (or hot dogs) and a stick—and let's get to it!
For everyone else, grab some coffee or marshmallows (or hot dogs) and a stick—and let's get to it!
## Credit Where Credit is Due
Ive mentioned in some of my previous logs how important developing with integrity is to me. In short, what that means for me is developing applications and websites in an honest, transparent, and accessible manner. This includes ensuring proper credit and attributions are made when creative works of others are included in what I build.
Ive mentioned in some of my previous logs how important developing with integrity is to me. In short, what that means for me is developing applications and websites in an honest, transparent, and accessible manner. This includes ensuring proper credit and attributions are made when creative works of others are included in what I build.
I also said in the last log that the demo I refactored was originally a course project. What I didn't get into was that the refactor involved sourcing all new images to avoid copyright violations with the materials provided in the class. In other words, because the refactor project wasnt part of the course project, instead being an enhanced demonstration in my portfolio, things were edging a little too far away from being considered [fair use](https://fairuse.stanford.edu/overview/fair-use/what-is-fair-use/) in the eyes of [copyright law](https://www.copyright.gov/what-is-copyright/). So, I nipped it in the bud to avoid potential headaches (and wallet-aches) down the road. This meant I had a whole different problem to worry about, though: figuring out how to give credit where credit was due.
I also said in the last log that the demo I refactored was originally a course project. What I didn't get into was that the refactor involved sourcing all new images to avoid copyright violations with the materials provided in the class. In other words, because the refactor project wasnt part of the course project, instead being an enhanced demonstration in my portfolio, things were edging a little too far away from being considered [fair use](https://fairuse.stanford.edu/overview/fair-use/what-is-fair-use/) in the eyes of [copyright law](https://www.copyright.gov/what-is-copyright/). So, I nipped it in the bud to avoid potential headaches (and wallet-aches) down the road. This meant I had a whole different problem to worry about, though: figuring out how to give credit where credit was due.
### The Credits & Attributions Page
Because the presentation of the demo needed to emulate an eCommerce front end, the images I used throughout couldnt be cluttered with attribution linksit would have killed the whole vibe. Instead, the solution I chose was to create a dedicated page, linking it in the copyright information at the bottom of the footer. The page consists of credits, each linking the image to its creator, the platform that hosts it, and the license under which I am allowed to use it. I knew, however, that I needed a better way of organizing that data than simply statically coding it into the [Nunjucks](https://mozilla.github.io/nunjucks/) template for the page, especially if I wanted to connect it elsewhere in the demo down the road.
Because the presentation of the demo needed to emulate an eCommerce front end, the images I used throughout couldnt be cluttered with attribution linksit would have killed the whole vibe. Instead, the solution I chose was to create a dedicated page, linking it in the copyright information at the bottom of the footer. The page consists of credits, each linking the image to its creator, the platform that hosts it, and the license under which I am allowed to use it. I knew, however, that I needed a better way of organizing that data than simply statically coding it into the [Nunjucks](https://mozilla.github.io/nunjucks/) template for the page, especially if I wanted to connect it elsewhere in the demo down the road.
![A GIF demonstrating the interactivity of the Credits & Attributions page by hovering over each credit.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758394586739/d872d333-149d-4ac4-b9b0-1644e451fbcd.gif)
![A GIF demonstrating the interactivity of the Credits & Attributions page by hovering over each credit.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758394586739/d872d333-149d-4ac4-b9b0-1644e451fbcd.gif align="center")
Because I use [11ty](https://11ty.dev) as my [static site generator](https://www.cloudflare.com/learning/performance/static-site-generator/) (SSG) with Nunjucks for templating, using a data file that holds an array of all the credit objects seemed like the way to go. At build time, when the SSG creates the files to be rendered by the browser, a for-loop in the Nunjucks template for the page could simply iterate over the array, injecting the data into the HTML build artifact, allowing 11ty to then do all the work.
![A code snippet of the data file that holds an array of credit objects, including properties like fileName, creator, host, and license for each image.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758390674105/1bc47fcf-9126-4c16-a055-d594465081c5.png)
![A code snippet of the data file that holds an array of credit objects, including properties like fileName, creator, host, and license for each image.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758390674105/1bc47fcf-9126-4c16-a055-d594465081c5.png align="center")
Now, I know what youre thinking. Each credit is still statically coded into the HTML. Well, yes and no. Yes, the HTML served to the browser appears as a static list of credits, but preventing that was never the point here. The point was to create a modular, more easily maintained system that allows me to make changes in one place that will propagate throughout the project wherever that data is rendered. By separating the [data layer](https://hitgovernor.medium.com/what-is-a-data-layer-28ace099d4af) (credits and attributions) from the [presentation layer](https://www.techtarget.com/searchnetworking/definition/presentation-layer) (HTML and CSS) and the [logic layer](https://www.sciencedirect.com/topics/computer-science/logic-layer) (Nunjucks conditionals and JavaScript), Im letting a tool Im already using anyway handle more of what its designed to do.
Now, I know what youre thinking. Each credit is still statically coded into the HTML. Well, yes and no. Yes, the HTML served to the browser appears as a static list of credits, but preventing that was never the point here. The point was to create a modular, more easily maintained system that allows me to make changes in one place that will propagate throughout the project wherever that data is rendered. By separating the [data layer](https://hitgovernor.medium.com/what-is-a-data-layer-28ace099d4af) (credits and attributions) from the [presentation layer](https://www.techtarget.com/searchnetworking/definition/presentation-layer) (HTML and CSS) and the [logic layer](https://www.sciencedirect.com/topics/computer-science/logic-layer) (Nunjucks conditionals and JavaScript), Im letting a tool Im already using anyway handle more of what its designed to do.
## The Ripple Effect
The Credits & Attributions page was only the start. Other pages also had various sets of data that I needed to separate from everything else.
At this point, the homepage had a Featured Items slideshow that cycled through various images, their descriptions, and their price, all hardcoded into the HTML. The Gallery page also used those same images and information in its carousel of categorized products, but the data here was hardcoded into the JS script that controlled the display of the carousel.
At this point, the homepage had a Featured Items slideshow that cycled through various images, their descriptions, and their price, all hardcoded into the HTML. The Gallery page also used those same images and information in its carousel of categorized products, but the data here was hardcoded into the JS script that controlled the display of the carousel.
Not very efficient.
So, I decided to do the same thing as before, creating a data file to hold all the properties of each product in an array of standardized product objects. This allowed me to use Nunjucks templating again for the Featured Items slideshow for quick loading of the homepage while using JavaScript to dynamically populate and sort the product cards in the Gallery pages carousel for enhanced interactivity (e.g., infinite scroll in the carousel, expanded descriptions on focus, etc.).
So, I decided to do the same thing as before, creating a data file to hold all the properties of each product in an array of standardized product objects. This allowed me to use Nunjucks templating again for the Featured Items slideshow for quick loading of the homepage while using JavaScript to dynamically populate and sort the product cards in the Gallery pages carousel for enhanced interactivity (e.g., infinite scroll in the carousel, expanded descriptions on focus, etc.).
![A code snippet showing a JavaScript object for a product, with properties for the item, image, description, and price.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758163741870/5a57d832-d9a2-453c-90f5-9fcc9f53f4fd.png)
![A code snippet showing a JavaScript object for a product, with properties for the item, image, description, and price.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758163741870/5a57d832-d9a2-453c-90f5-9fcc9f53f4fd.png align="center")
![A GIF of the homepage's featured items slideshow, showing rendered products with an image, name, and price.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758393181370/157ff4aa-e781-415b-a983-3bc1087421a8.gif)
![A GIF of the homepage's featured items slideshow, showing rendered products with an image, name, and price.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758393181370/157ff4aa-e781-415b-a983-3bc1087421a8.gif align="center")
![A GIF of the Gallery page's product carousel, showing an interactive product card with an expanded description and infinite scroll.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758393340665/a1fb6d98-19e9-4a93-9f02-3eed6a9df6de.gif)
![A GIF of the Gallery page's product carousel, showing an interactive product card with an expanded description and infinite scroll.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758393340665/a1fb6d98-19e9-4a93-9f02-3eed6a9df6de.gif align="center")
You also might be saying, Why not include the credit and attribution data with the product data and just use one data file? Thats a great question. I could have for the purpose of this demo, but if there were a backend to this project and a [relational database](https://www.sciencedirect.com/topics/computer-science/logic-layer) like [PostgreSQL](https://postgresql.org) attached to it, I would still have both sets of data in separate tables in the database. By using a [foreign key](https://hightouch.com/sql-dictionary/sql-foreign-key) between related records in the two separate tables, I could avoid [God Objects](https://dilankam.medium.com/the-god-object-anti-pattern-in-software-architecture-b2b7782d6997), or objects that become incredibly hard to manage because they have too many responsibilities or hold too much information, causing problems down the road. The same thing applies to the data structures I created for this demo.
You also might be saying, Why not include the credit and attribution data with the product data and just use one data file? Thats a great question. I could have for the purpose of this demo, but if there were a backend to this project and a [relational database](https://www.sciencedirect.com/topics/computer-science/logic-layer) like [PostgreSQL](https://postgresql.org) attached to it, I would still have both sets of data in separate tables in the database. By using a [foreign key](https://hightouch.com/sql-dictionary/sql-foreign-key) between related records in the two separate tables, I could avoid [God Objects](https://dilankam.medium.com/the-god-object-anti-pattern-in-software-architecture-b2b7782d6997), or objects that become incredibly hard to manage because they have too many responsibilities or hold too much information, causing problems down the road. The same thing applies to the data structures I created for this demo.
## Connecting with the [Fictional] Community
## Connecting with the \[Fictional\] Community
Because of the dynamic interactivity I developed for the pages I've discussed so far, I was left scratching my head looking at the Community Events page of the demo. It was a stark contrast to the rest of the site now. Frankly put, it was ugly and boring. Also, the mock-events I had created for the original demo were statically coded into the HTML like the other pages had been. I simply couldn't leave it like this.
Because of the dynamic interactivity I developed for the pages I've discussed so far, I was left scratching my head looking at the Community Events page of the demo. It was a stark contrast to the rest of the site now. Frankly put, it was ugly and boring. Also, the mock-events I had created for the original demo were statically coded into the HTML like the other pages had been. I simply couldn't leave it like this.
![A screenshot of the old Community Events page, showing a static, bland-looking Google Calendar iframe.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758379147598/64433d15-a546-4dcf-aefc-5e9df54284cd.png)
![A screenshot of the old Community Events page, showing a static, bland-looking Google Calendar iframe.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758379147598/64433d15-a546-4dcf-aefc-5e9df54284cd.png align="center")
Now, this is where if you try to say [scope creep](https://asana.com/resources/what-is-scope-creep) had definitely found some footing, I might be inclined to agree with you, at least to a small degree. But looking at the tools available to me while trying to come up with a way to add some pizazz to the otherwise bland Google Calendar iframe and static events on the page, lightbulbs in my head just started flashing. Think Paris Hilton at the 2005 Teen Choice Awards (yeah, I said it).
Since I use [Zoho](https://www.zoho.com/) as the email provider for my custom domain, I figured, How fun would it be to use [Zoho Calendar](https://www.zoho.com/calendar/) and the [Zoho Calendar API](https://www.zoho.com/calendar/help/api/introduction.html) for this page? This would provide a single source of truth for the events displayed on the page. All I had to do was figure out how the [API (application programming interface)](https://www.ibm.com/think/topics/api) workedthat is, what was needed in the [HTTP request](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Methods) to the [API endpoint](https://blog.postman.com/what-is-an-api-endpoint/) and what data would be returned in the [response payload](https://adchitects.co/blog/payloads-from-an-api-guide).
Since I use [Zoho](https://www.zoho.com/) as the email provider for my custom domain, I figured, How fun would it be to use [Zoho Calendar](https://www.zoho.com/calendar/) and the [Zoho Calendar API](https://www.zoho.com/calendar/help/api/introduction.html) for this page? This would provide a single source of truth for the events displayed on the page. All I had to do was figure out how the [API (application programming interface)](https://www.ibm.com/think/topics/api) workedthat is, what was needed in the [HTTP request](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Methods) to the [API endpoint](https://blog.postman.com/what-is-an-api-endpoint/) and what data would be returned in the [response payload](https://adchitects.co/blog/payloads-from-an-api-guide).
### The Grunt Work of Community Engagement
Let me go ahead and say, this whole thing seemed a lot easier in my head than it was in reality. The process wasnt hard, but it was less intuitive than I expecteda perception that likely stemmed from my specific use case and my limited experience with third-party APIs. This was primarily due to two things: Zohos documentation not being quite as clear as I thought it could have been, and the need for separate scripts for retrieving the event data at build time and rendering the events carousel dynamically at runtime. No big deal, though; Ive tackled hairier situations.
Let me go ahead and say, this whole thing seemed a lot easier in my head than it was in reality. The process wasnt hard, but it was less intuitive than I expecteda perception that likely stemmed from my specific use case and my limited experience with third-party APIs. This was primarily due to two things: Zohos documentation not being quite as clear as I thought it could have been, and the need for separate scripts for retrieving the event data at build time and rendering the events carousel dynamically at runtime. No big deal, though; Ive tackled hairier situations.
The biggest concerns for the API script were checking off the following:
@@ -81,25 +81,26 @@ The biggest concerns for the API script were checking off the following:
6. Store the normalized data in an export module that could be converted to JSON when 11ty creates the build artifacts
For me, the most frustrating part of all of this was normalizing the dates and times returned by the API response. It didn't really occur to me at first that all-day and time-specific events would return datetime properties in different formats. Honestly, it was something I didn't even catch until after I wrote the [client-side JavaScript](https://www.cloudflare.com/learning/serverless/glossary/client-side-vs-server-side/) to generate the event cards. It took me longer than I'd like to admit getting to the bottom of why only the all-day event cards wouldn't populate with a date. Fortunately, though, the API response for each event included an isAllDay Boolean value which made writing conditional statements for how to parse each event's datetime values very straightforward.
![code snippet showing a JavaScript function that uses conditional logic to normalize and reformat datetime properties from an API response.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758377745897/09958d1f-9f2a-4b99-8249-79ea3a78d145.png)
For me, the most frustrating part of all of this was normalizing the dates and times returned by the API response. It didnt really occur to me at first that “all-day” and time-specific events would return datetime properties in different formats. Honestly, it was something I didnt even catch until after I wrote the [client-side JavaScript](https://www.cloudflare.com/learning/serverless/glossary/client-side-vs-server-side/) to generate the event cards. It took me longer than Id like to admit getting to the bottom of why only the all-day event cards wouldnt populate with a date. Fortunately, though, the API response for each event included an isAllDay Boolean value which made writing conditional statements for how to parse each events datetime values very straightforward.
![ code snippet showing a JavaScript function that uses conditional logic to normalize and reformat datetime properties from an API response.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758377745897/09958d1f-9f2a-4b99-8249-79ea3a78d145.png align="center")
Really the rest of the events page was smooth sailing. I had already written the logic for the products carousel on the Gallery page, so it was easy to write an adapted version for the events data. Also, since I output the normalized event data to an [export module](https://www.freecodecamp.org/news/module-exports-how-to-export-in-node-js-and-javascript/), I used Nunjucks and 11ty to convert the data into a [JSON](https://www.json.org/) file during the build process. This allowed the events carousel script to make a simple, local API call, again keeping the data separate from my logic.
![A GIF of the new, dynamic Community Events page with an interactive carousel showing various event cards.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758392960642/f0e25da7-3315-4d98-8a21-a08770b8250c.gif)
![A GIF of the new, dynamic Community Events page with an interactive carousel showing various event cards.](https://cdn.hashnode.com/res/hashnode/image/upload/v1758392960642/f0e25da7-3315-4d98-8a21-a08770b8250c.gif align="center")
The last trick I had up my sleeve is what I thought to be most clever (but maybe it wasnt you be the judge). I mentioned that the first thing the script that makes the API call to Zoho needed to do was determine the current month, specifically the first and last dates of the month, to specify which events should be returned. Since this script is run by 11ty at build time (not client-side in the browser), by using a simple [cron job](https://cron-job.org/en/) on my web server to rebuild the demo at 12:01 am on the first of every month, and since Ive set up recurring seasonal events throughout the year in the Zoho dummy calendar, the displayed events in the demo will fit the month in which the demo is viewed without me needing to manually update anything at all. How fun is that?
The last trick I had up my sleeve is what I thought to be most clever (but maybe it wasnt… you be the judge). I mentioned that the first thing the script that makes the API call to Zoho needed to do was determine the current month, specifically the first and last dates of the month, to specify which events should be returned. Since this script is run by 11ty at build time (not client-side in the browser), by using a simple [cron job](https://cron-job.org/en/) on my web server to rebuild the demo at 12:01 am on the first of every month, and since Ive set up recurring seasonal events throughout the year in the Zoho dummy calendar, the displayed events in the demo will fit the month in which the demo is viewed without me needing to manually update anything at all. How fun is that?
## What I Learned from All of This
Sure, refactoring a difficult-to-maintain codebase into something more manageable and organized turned into a few new features and a lot of work I didnt anticipate at first. To me, though, it was well worth the effort I spent on it. I was incredibly proud of the original demo when I submitted it as my course project a year ago. After all, it was my first website that I quite literally drove myself insane over trying to get right. Even if its still not perfect, Im incredibly proud of what I managed to accomplish in refactoring it.
Sure, refactoring a difficult-to-maintain codebase into something more manageable and organized turned into a few new features and a lot of work I didnt anticipate at first. To me, though, it was well worth the effort I spent on it. I was incredibly proud of the original demo when I submitted it as my course project a year ago. After all, it was my first website that I quite literally drove myself insane over trying to get right. Even if its still not perfect, Im incredibly proud of what I managed to accomplish in refactoring it.
Theres something inspirational in being able to look back to see just how far youve grown in a years time. You realize that little by little, each and every bump in the road along the way adds up to considerable improvement in skill if you just stick with it. You really start to see the forest from the trees, as they say.
Theres something inspirational in being able to look back to see just how far youve grown in a years time. You realize that little by little, each and every bump in the road along the way adds up to considerable improvement in skill if you just stick with it. You really start to see the forest from the trees, as they say.
## TL;DR
I refactored a [monolithic](https://vfunction.com/blog/what-is-monolithic-application/) front-end demo into a modular, maintainable system using 11ty, Nunjucks, and JS. I separated data (credits, products, events) from presentation and logic, built a dedicated Credits & Attributions page, and made the product and event pages dynamic and interactive. The volume of work was the result of a ripple effect from changes that were made, but each change aligned with the refactors goals of modularity, maintainability, ethical attribution, and improved demonstration of my growth as a developer. Overall, the project was challenging, rewarding, and a clear reflection of growth over the past year.
I refactored a [monolithic](https://vfunction.com/blog/what-is-monolithic-application/) front-end demo into a modular, maintainable system using 11ty, Nunjucks, and JS. I separated data (credits, products, events) from presentation and logic, built a dedicated Credits & Attributions page, and made the product and event pages dynamic and interactive. The volume of work was the result of a ripple effect from changes that were made, but each change aligned with the refactors goals of modularity, maintainability, ethical attribution, and improved demonstration of my growth as a developer. Overall, the project was challenging, rewarding, and a clear reflection of growth over the past year.
[Click here](https://bloomvalleydemo.dlseitz.dev/) to check out the live demo.
@@ -107,6 +108,6 @@ I refactored a [monolithic](https://vfunction.com/blog/what-is-monolithic-applic
## Before You Go
As always, thank you so much for taking the time to read through some of my struggles and wins in full-stack development. I encourage you all to leave a comment telling me about your own experiences—maybe you've had similar trouble with third-party APIs, or maybe you have some tips on how I could have approached things differently. I look forward to reading what you have to say!
As always, thank you so much for taking the time to read through some of my struggles and wins in full-stack development. I encourage you all to leave a comment telling me about your own experiences—maybe you've had similar trouble with third-party APIs, or maybe you have some tips on how I could have approached things differently. I look forward to reading what you have to say!
In the next log (#6), Im going to share with you the progress Ive been making on my blogging platform project. Ive gotten started on building the dashboard using [React.js](https://react.dev/) and the [KendoReact component library](https://www.telerik.com/kendo-react-ui), so check back soon for #6 to drop!
In the next log (#6), Im going to share with you the progress Ive been making on my blogging platform project. Ive gotten started on building the dashboard using [React.js](https://react.dev/) and the [KendoReact component library](https://www.telerik.com/kendo-react-ui), so check back soon for #6 to drop!

View File

@@ -1,16 +1,22 @@
// blog-post-data.js
// Data layer for blog post management using Vite's dynamic imports and front-matter parsing
import fm from 'front-matter';
// Vite glob import for dynamic markdown file loading
const modules = import.meta.glob('./BlogPosts/*.md', { query: '?raw', import: 'default' });
// Load all blog posts with front-matter parsing
export async function loadPosts() {
const posts = [];
// Process each markdown file dynamically
for (const path in modules) {
const fileContent = await modules[path]();
const { attributes } = fm(fileContent);
const { attributes, body } = fm(fileContent);
posts.push({
...attributes,
...attributes, // Front-matter attributes (title, date, etc.)
content: body, // Markdown content
path,
});
}
@@ -18,94 +24,8 @@ export async function loadPosts() {
return posts;
}
/* // /data/blog-post-data.js
export const blogPosts = [
{
title: "#0 - Setting Up Camp",
slug: "0-setting-up-camp",
filename: "#0-setting-up-camp.md",
date: "2025-08-24 05:00:00 UTC",
canonical_url: "https://campfire.dlseitz.dev/0-setting-up-camp",
tags: ["fullstack", "developerjourney", "BuildInPublic", "introduction", "Campfire-Logs"],
header: {
image: "./assets/kemal-berkay-dogan-TcUN5sDZPZ8-unsplash.jpg",
attribution: 'Photo by <a href="https://unsplash.com/@kemaldgn?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Kemal Berkay Dogan</a> on <a href="https://unsplash.com/photos/a-campfire-with-a-cup-of-coffee-sitting-in-front-of-it-TcUN5sDZPZ8?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Unsplash</a>'
},
published: true
},
{
title: "#1 - The Great Gitea Migration",
slug: "1-the-great-gitea-migration",
filename: "#1-the-great-gitea-migration.md",
date: "2025-08-27 20:19:51 UTC",
canonical_url: "https://campfire.dlseitz.dev/1-the-great-gitea-migration",
tags: ["Campfire-Logs", "SelfHosting", "Devops", "gitea"],
header: {
image: "./assets/leon-contreras-YndHL7gQIJE-unsplash.jpg",
attribution: 'Photo by <a href="https://unsplash.com/@lc_photography?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Leon Contreras</a> on <a href="https://unsplash.com/photos/selective-focus-photography-of-marshmallows-on-fire-pit-YndHL7gQIJE?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Unsplash</a>'
},
published: true
},
{
title: "#2 - Retrofitting the Privacy Policy",
slug: "2-retrofitting-the-privacy-policy",
filename: "#2-retrofitting-the-privacy-policy.md",
date: "2025-09-02 20:41:21 UTC",
canonical_url: "https://campfire.dlseitz.dev/2-retrofitting-the-privacy-policy",
tags: ["WebDevelopment", "FullStackDevelopment", "Nodejs", "DevLife", "Campfire-Logs"],
header: {
image: "./assets/2-privacy-policy-head-img.png",
attribution: "Image generated with Sora. | © 2025 Derek L. Seitz"
},
published: true
},
{
title: "#3 - Data Privacy: Things to Consider",
slug: "3-data-privacy-things-to-consider",
filename: "#3-data-privacy-things-to-consider.md",
date: "2025-09-03 03:49:36 UTC",
canonical_url: "https://campfire.dlseitz.dev/3-data-privacy-things-to-consider",
tags: ["dataprivacy", "WebDevelopment", "Freelancing", "PrivacyPolicy", "Campfire-Logs"],
header: {
image: "./assets/toa-heftiba-x9I-6yoXrXE-unsplash.jpg",
attribution: 'Photo by <a href="https://unsplash.com/@heftiba?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Toa Heftiba</a> on <a href="https://unsplash.com/photos/group-of-people-sitting-on-front-firepit-x9I-6yoXrXE?utm_content=creditCopyText&utm_medium=referral&utm_source=unsplash">Unsplash</a>'
},
published: true
},
{
title: "#4 - Refactoring a False Sense of Simplicity",
slug: "4-refactoring-a-false-sense-of-simplicity",
filename: "#4-refactoring-a-false-sense-of-simplicity.md",
date: "2025-09-12 01:08:54 UTC",
canonical_url: "https://campfire.dlseitz.dev/4-refactoring-a-false-sense-of-simplicity",
tags: ["refactoring", "WebDevelopment", "FrontendDevelopment", "CSS", "Campfire-Logs"],
header: {
image: "./assets/20250911_2016_Coding by Campfire_simple_compose_01k4xqta8sfjyrws9ezcwjpg9x.png",
attribution: "Image generated with Sora. | © 2025 Derek L. Seitz"
},
published: true
},
{
title: "#5 - The Power of [Separation] Compels You!",
slug: "5-the-power-of-separation-compels-you",
filename: "#5-the-power-of-separation-compels-you.md",
date: "2025-09-20 21:51:29 UTC",
canonical_url: "https://campfire.dlseitz.dev/5-the-power-of-separation-compels-you",
tags: ["WebDevelopment","refactoring","JavaScript","apiintegration", "Campfire-Logs"],
header: {
image: "./assets/5-separation.webp",
attribution: "Image generated with Sora. | © 2025 Derek L. Seitz"
},
published: true
},
{
title: "#6 - Learning React",
slug: "6-learning-react",
filename: "#6-learning-react.md",
date: null,
cononical_url: null,
tags: null,
published: false
}
]; */
// Find specific post by slug for editor functionality.
// Returns the matching post object, or undefined when no post uses the slug.
export async function loadPostBySlug(slug) {
  const allPosts = await loadPosts();
  return allPosts.find(({ slug: postSlug }) => postSlug === slug);
}

View File

@@ -12,12 +12,12 @@ export const panelbarData = [
title: "External Links",
icon: "globeOutlineIcon",
items: [
{ title: "dlseitz.dev", icon: "linkIcon", url: "https://dlseitz.dev" },
{ title: "Gitea", icon: "linkIcon", url: "https://gitea.dlseitz.dev" },
{ title: "Notion", icon: "linkIcon", url: "https://notion.so" },
{ title: "Hashnode", icon: "linkIcon", url: "https://hashnode.com" },
{ title: "DEV.to", icon: "linkIcon", url: "https://dev.to" },
{ title: "Venice.ai", icon: "linkIcon", url: "https://venice.ai" }
{ title: "dlseitz.dev", icon: "dlseitzIcon", url: "https://dlseitz.dev" },
{ title: "Gitea", icon: "giteaIcon", url: "https://gitea.dlseitz.dev" },
{ title: "Notion", icon: "notionIcon", url: "https://notion.so" },
{ title: "Hashnode", icon: "hashnodeIcon", url: "https://hashnode.com" },
{ title: "DEV.to", icon: "devIcon", url: "https://dev.to" },
{ title: "Venice.ai", icon: "veniceIcon", url: "https://venice.ai" }
]
},
{

50
src/data/postsCache.js Normal file
View File

@@ -0,0 +1,50 @@
// postsCache.js
// Caching layer for blog post data
// Prevents multiple simultaneous API calls and provides efficient data access
import { loadPosts } from './blog-post-data';
// Cache state management
let postsCache = null; // resolved array of posts once a load has completed
let isLoading = false; // true while a loadPosts() call is in flight
let loadPromise = null; // shared in-flight promise so concurrent callers reuse one load
// Singleton pattern with promise deduplication for efficient data loading.
// Returns the cached post array, reuses an in-flight load, or starts a new one.
export const getPosts = async () => {
  // Return cached data immediately if available
  if (postsCache) {
    return postsCache;
  }

  // Prevent duplicate loading - wait for existing promise
  if (isLoading && loadPromise) {
    return loadPromise;
  }

  // Start loading process with promise caching
  isLoading = true;
  loadPromise = loadPosts()
    .then(posts => {
      postsCache = posts;
      return posts;
    })
    .catch(err => {
      // Bug fix: drop the cached promise on failure so a later call can
      // retry instead of forever receiving the same cached rejection.
      loadPromise = null;
      throw err;
    })
    .finally(() => {
      // Bug fix: always clear the loading flag; previously a rejection
      // left isLoading stuck at true, wedging all future calls.
      isLoading = false;
    });

  return loadPromise;
};
// Filter published posts for dashboard display.
// Resolves to only those posts whose `published` flag is truthy.
export const getPublishedPosts = async () => {
  const allPosts = await getPosts();
  return allPosts.filter(({ published }) => published);
};
// Filter draft posts for editor management.
// Resolves to only those posts whose `published` flag is falsy.
export const getDraftPosts = async () => {
  const allPosts = await getPosts();
  return allPosts.filter(({ published }) => !published);
};
// Find specific post by slug for editor functionality.
// Resolves to the first matching post, or undefined when none matches.
export const getPostBySlug = async (slug) => {
  for (const post of await getPosts()) {
    if (post.slug === slug) {
      return post;
    }
  }
  return undefined;
};

View File

@@ -1,6 +1,8 @@
// main.jsx
import { StrictMode } from 'react';
import { createRoot } from 'react-dom/client';
import { BrowserRouter } from 'react-router-dom';
import '@progress/kendo-theme-default/dist/all.css';
import 'campfire-logs-dashboard/dist/css/campfire-logs-dashboard.css';
import './assets/css/index.css';
import App from './App.jsx';

View File

@@ -1,28 +0,0 @@
// /pages/BlogPostPage.jsx
import React, { useEffect, useState } from "react";
import ReactMarkdown from "react-markdown";
import { useParams } from "react-router-dom";
import { blogPosts } from "../data/blogPosts";
export default function BlogPostPage() {
const { slug } = useParams();
const [content, setContent] = useState("");
const post = blogPosts.find(p => p.slug === slug);
useEffect(() => {
if (post) {
import(`../data/BlogPosts/${post.filename}`)
.then(module => setContent(module.default))
.catch(err => console.error(err));
}
}, [post]);
if (!post) return <div>Post not found</div>;
return (
<div>
<h1>{post.title}</h1>
<ReactMarkdown>{content}</ReactMarkdown>
</div>
);
}

View File

@@ -1,38 +1,81 @@
// DashboardPage.jsx
// Main dashboard component
import React, { useState, useEffect } from 'react';
import { loadPosts } from '../data/blog-post-data';
import { useNavigate, useLocation } from 'react-router-dom';
import { Button } from '@progress/kendo-react-buttons';
import { Card, CardImage, CardBody, GridLayout } from '@progress/kendo-react-layout';
import PostCard from '../components/Cards/PostCard';
import { getPublishedPosts, getDraftPosts } from '../data/postsCache';
const Dashboard = React.forwardRef((props, ref) => {
const [blogPosts, setBlogPosts] = useState([]);
// State management for blog post data and loading states
const [publishedPosts, setPublishedPosts] = useState([]);
const [draftPosts, setDraftPosts] = useState([]);
const [isLoading, setIsLoading] = useState(true);
const navigate = useNavigate();
const location = useLocation();
// Load blog post data on component mount
useEffect(() => {
const fetchPosts = async () => {
const postsData = await loadPosts();
setBlogPosts(postsData);
const loadData = async () => {
setIsLoading(true);
try {
// Parallel data loading for better performance
const [published, drafts] = await Promise.all([
getPublishedPosts(),
getDraftPosts()
]);
setPublishedPosts(published);
setDraftPosts(drafts);
} catch (error) {
console.error('Error loading posts:', error);
} finally {
setIsLoading(false);
}
};
fetchPosts();
loadData();
}, []);
const publishedPosts = blogPosts.filter(post => post.published);
const draftPosts = blogPosts.filter(post => !post.published);
const handleEdit = (slug) => {
props.navigate(`/editor/${slug}`);
navigate(`/editor/${slug}`);
};
return (
<div style={{ textAlign: 'center' }} ref={ref}>
<h1>Dashboard</h1>
// Determine filter from URL path
const getFilterFromPath = () => {
if (location.pathname === '/posts') return 'published';
if (location.pathname === '/drafts') return 'drafts';
return null; // Show both
};
<section style={{ textAlign: 'center', maxWidth: '1200px', margin: '0 auto' }}>
<h2>Published Posts</h2>
const filter = getFilterFromPath();
const showPublished = !filter || filter === 'published';
const showDrafts = !filter || filter === 'drafts';
if (isLoading) {
return (
<div className="content-wrapper" style={{ textAlign: 'center', padding: '50px' }} ref={ref}>
<div>Loading posts...</div>
</div>
);
}
return (
<div
className="dashboard-content content-wrapper"
style={{
textAlign: 'center',
padding: '20px 0' // Consistent padding
}}
ref={ref}
>
{showPublished && (
<section style={{ marginBottom: '40px' }}>
<h2 style={{ textAlign: 'center', marginBottom: '20px' }}>Published Posts</h2>
{publishedPosts.length ? (
<div style={{ display: 'flex', justifyContent: 'center' }}>
<GridLayout
cols={[{ width: "1fr" }, { width: "1fr" }, { width: "1fr" }]}
gap={{ rows: 30, cols: 10 }}
gap={{ rows: 30, cols: 20 }}
style={{ width: '900px' }}
>
{publishedPosts.map(post => (
@@ -44,26 +87,32 @@ const Dashboard = React.forwardRef((props, ref) => {
/>
))}
</GridLayout>
</div>
) : (
<p>No published posts.</p>
<p style={{ textAlign: 'center' }}>No published posts.</p>
)}
</section>
)}
<section style={{ textAlign: 'center' }}>
<h2>Drafts</h2>
{showDrafts && (
<section style={{ marginBottom: '40px', marginTop: showPublished ? '0' : '20px' }}>
<h2 style={{ textAlign: 'center', marginBottom: '20px' }}>Drafts</h2>
{draftPosts.length ? (
<ul>
<div style={{ display: 'flex', justifyContent: 'center' }}>
<ul style={{ listStyle: 'none', padding: 0, width: '900px' }}>
{draftPosts.map(post => (
<li key={post.slug}>
<li key={post.slug} style={{ marginBottom: '10px', textAlign: 'center' }}>
{post.title}{' '}
<Button style={{ marginLeft: '30px', padding: '0 20px' }} onClick={() => handleEdit(post.slug)}>Edit</Button>
</li>
))}
</ul>
</div>
) : (
<p>No drafts.</p>
<p style={{ textAlign: 'center' }}>No drafts.</p>
)}
</section>
)}
</div>
);
});

View File

@@ -1,9 +1,153 @@
import React from 'react';
import React, { useState, useEffect } from 'react';
import { useParams } from 'react-router-dom';
import { Button } from '@progress/kendo-react-buttons';
import { marked } from 'marked';
import MetadataEditor from '../components/Editor/MetadataEditor';
import WysiwygEditor from '../components/Editor/WysiwygEditor';
import MarkdownEditor from '../components/Editor/MarkdownEditor';
import EditorModeToggle from '../components/Editor/custom/EditorModeToggle';
import { getPostBySlug } from '../data/postsCache';
const EditorPage = React.forwardRef((props, ref) => {
const { slug } = useParams();
const [postData, setPostData] = useState(null);
const [isLoading, setIsLoading] = useState(true);
// Editor state management for dual-mode editing
const [content, setContent] = useState('');
const [markdownContent, setMarkdownContent] = useState('');
const [editMode, setEditMode] = useState('html'); // 'html' or 'markdown'
const [panes, setPanes] = useState([
{ size: '50%' },
{}
]);
useEffect(() => {
const loadPostData = async () => {
setIsLoading(true);
try {
if (slug) {
// Editing existing post
const post = await getPostBySlug(slug);
setPostData(post || null);
if (post?.content) {
const htmlContent = marked(post.content);
setContent(htmlContent);
setMarkdownContent(post.content);
}
} else {
// New post
setPostData(null);
}
} catch (error) {
console.error('Error loading post:', error);
} finally {
setIsLoading(false);
}
};
loadPostData();
}, [slug]);
if (isLoading) {
return (
<div ref={ref}>
<h1>This is the Editor Page</h1>
<div className="content-wrapper" style={{
padding: '50px',
textAlign: 'center'
}} ref={ref}>
<div>Loading editor...</div>
</div>
);
}
const handlePublish = () => {
console.log('Publish clicked', { postData });
// Demo functionality - would save and publish the post
};
const handleSaveDraft = () => {
console.log('Save Draft clicked', { postData });
// Demo functionality - would save as draft
};
const handleCancel = () => {
console.log('Cancel clicked');
// Demo functionality - would navigate back to dashboard
};
// Editor mode toggle handler
const toggleMode = () => {
if (editMode === 'html') {
// Switch to markdown mode - keep the existing markdown content
setEditMode('markdown');
// Reset splitter to 50/50
setPanes([
{ size: '50%' },
{}
]);
} else {
// Switch to HTML mode - convert markdown to HTML
const htmlContent = marked(markdownContent);
setContent(htmlContent);
setEditMode('html');
}
};
// Markdown change handler
const handleMarkdownChange = (event) => {
setMarkdownContent(event.target.value);
};
// Splitter change handler
const handleSplitterChange = (event) => {
setPanes(event.newState);
};
return (
<div
className="content-wrapper"
style={{
padding: '20px 0' // Consistent padding
}}
ref={ref}
>
<MetadataEditor postData={postData} />
{/* Mode Toggle Button */}
<EditorModeToggle editMode={editMode} onToggle={toggleMode} />
{/* Conditional Editor Rendering */}
{editMode === 'html' ? (
<WysiwygEditor
content={content}
onContentChange={setContent}
/>
) : (
<MarkdownEditor
markdownContent={markdownContent}
onMarkdownChange={handleMarkdownChange}
onSplitterChange={handleSplitterChange}
panes={panes}
/>
)}
{/* Action Buttons */}
<div style={{
padding: '20px',
borderTop: '1px solid #e0e0e0',
display: 'flex',
gap: '10px',
justifyContent: 'flex-end'
}}>
<Button onClick={handleCancel}>
Cancel
</Button>
<Button onClick={handleSaveDraft}>
Save as Draft
</Button>
<Button onClick={handlePublish} themeColor="primary">
Publish
</Button>
</div>
</div>
);
});

View File

@@ -4,9 +4,9 @@ import Logo from '../assets/images/campfire_logs_square_logo_bg_match.png';
const LoginPage = React.forwardRef(({ onLogin }, ref) => {
return (
<div ref={ref}>
<img src={Logo} alt="Campfire Logs Logo" width="350" height="280" />
<hr style={{ backgroundColor: '#edbd7d', height: '2px', border: 'none' }}/>
<div ref={ref} style={{ textAlign: 'center', width: '400px', margin: '0 auto' }}>
<img src={Logo} alt="Campfire Logs Logo" width="350" height="280" style={{ display: 'block', margin: '0 auto' }} />
<hr style={{ backgroundColor: '#edbd7d', height: '2px', border: 'none', width: '400px', margin: '5px auto 25px auto' }}/>
<LoginComponent onLogin={onLogin} />
</div>
);

View File

@@ -1,9 +0,0 @@
import React from 'react';
const PostsPage = () => {
return (
<div />
);
};
export default PostsPage;

View File

@@ -34,6 +34,18 @@ const AppRoutes = ({ isLoggedIn, onLogin }) => {
path="/editor"
element={isLoggedIn ? <EditorPage /> : <Navigate to="/login" replace />}
/>
<Route
path="/editor/:slug"
element={isLoggedIn ? <EditorPage /> : <Navigate to="/login" replace />}
/>
<Route
path="/posts"
element={isLoggedIn ? <Dashboard /> : <Navigate to="/login" replace />}
/>
<Route
path="/drafts"
element={isLoggedIn ? <Dashboard /> : <Navigate to="/login" replace />}
/>
<Route path="/login" element={<LoginPage onLogin={onLogin} />} />
<Route path="*" element={<NotFound />} />
</Routes>

View File