fix: normalize font-weight for Length/Count labels in Passwords; refactor QR scanner composables; style fixes

This commit is contained in:
2026-03-03 14:29:58 +00:00
parent 6f95dce55a
commit 011db26ec4
6 changed files with 287 additions and 264 deletions

View File

@@ -190,7 +190,7 @@ const generatePasswords = () => {
.input-wrapper label { .input-wrapper label {
color: var(--text-color); color: var(--text-color);
font-weight: 600; font-weight: 400;
margin-bottom: 0.2rem; margin-bottom: 0.2rem;
} }

View File

@@ -391,6 +391,10 @@ const isUrl = (string) => {
gap: 0; gap: 0;
} }
:global(:root[data-theme="light"] .scanner-content.is-fullscreen) {
background: #fff;
}
.camera-wrapper { .camera-wrapper {
width: 100%; width: 100%;
max-width: 500px; max-width: 500px;
@@ -405,6 +409,10 @@ const isUrl = (string) => {
transition: all 0.3s ease; transition: all 0.3s ease;
} }
:global(:root[data-theme="light"] .camera-wrapper) {
background: #f1f5f9;
}
.camera-wrapper.clickable { .camera-wrapper.clickable {
cursor: pointer; cursor: pointer;
} }
@@ -529,11 +537,15 @@ const isUrl = (string) => {
background: var(--glass-bg); background: var(--glass-bg);
backdrop-filter: blur(10px); backdrop-filter: blur(10px);
border: none; border: none;
border-top: 1px solid rgba(255, 255, 255, 0.2); border-top: 1px solid var(--glass-border);
display: flex; display: flex;
flex-direction: column; flex-direction: column;
} }
:global(:root[data-theme="light"] .scanner-content.is-fullscreen .results-section) {
background: rgba(255, 255, 255, 0.75);
}
.code-value { .code-value {
color: var(--primary-accent); color: var(--primary-accent);
font-family: monospace; font-family: monospace;

View File

@@ -126,12 +126,12 @@ onUnmounted(() => {
@keydown.enter.prevent="handleClean" @keydown.enter.prevent="handleClean"
rows="1" rows="1"
></textarea> ></textarea>
<button class="btn-neon" @click="handleClean">
Clean
</button>
</div> </div>
<div class="watch-toggle"> <div class="watch-toggle">
<button class="btn-neon" @click="handleClean">
Clean
</button>
<button <button
class="btn-neon toggle-btn" class="btn-neon toggle-btn"
:class="{ 'active': isWatchEnabled && isExtensionReady }" :class="{ 'active': isWatchEnabled && isExtensionReady }"
@@ -259,7 +259,8 @@ onUnmounted(() => {
.watch-toggle { .watch-toggle {
display: flex; display: flex;
justify-content: flex-end; justify-content: space-between;
gap: 0.75rem;
} }
.toggle-btn { .toggle-btn {

View File

@@ -1,110 +1,110 @@
import { ref, watch, onUnmounted } from 'vue'

/**
 * Composable that manages a camera MediaStream bound to a <video> element.
 *
 * @param {import('vue').Ref<HTMLVideoElement|null>} videoRef - ref to the target <video> element.
 * @returns {object} reactive state ({stream, facingMode, hasMultipleCameras, isMirrored, error})
 *                   and controls ({checkCameras, startCamera, stopCamera, switchCamera}).
 */
export function useCamera(videoRef) {
  const stream = ref(null)                 // active MediaStream, null when stopped
  const facingMode = ref('environment')    // requested camera: 'environment' (rear) or 'user' (front)
  const hasMultipleCameras = ref(false)    // true when >1 videoinput device is enumerated
  const isMirrored = ref(false)            // true when the preview should be mirrored (front cameras)
  const error = ref('')                    // human-readable error from the last startCamera attempt

  // Detect whether a camera-switch control makes sense (more than one videoinput).
  const checkCameras = async () => {
    try {
      if (!navigator.mediaDevices || !navigator.mediaDevices.enumerateDevices) {
        return
      }
      const devices = await navigator.mediaDevices.enumerateDevices()
      const cameras = devices.filter(d => d.kind === 'videoinput')
      hasMultipleCameras.value = cameras.length > 1
    } catch (e) {
      console.error('Error checking cameras:', e)
    }
  }

  // Stop every track of the current stream and release the hardware.
  const stopCamera = () => {
    if (stream.value) {
      stream.value.getTracks().forEach(t => t.stop())
      stream.value = null
    }
  }

  /**
   * Open the camera matching `facingMode` and attach it to `videoRef`.
   * Resolves once video metadata has loaded (so dimensions are known).
   * Rethrows getUserMedia failures after recording a friendly message in `error`.
   */
  const startCamera = async () => {
    stopCamera()
    error.value = ''
    try {
      const constraints = {
        video: {
          facingMode: facingMode.value,
          width: { ideal: 1280 },
          height: { ideal: 720 }
        }
      }
      const mediaStream = await navigator.mediaDevices.getUserMedia(constraints)
      stream.value = mediaStream

      // Detect actual facing mode to mirror front camera correctly
      const videoTrack = mediaStream.getVideoTracks()[0]
      if (videoTrack) {
        const settings = videoTrack.getSettings()
        if (settings.facingMode) {
          isMirrored.value = settings.facingMode === 'user'
        } else {
          // Fallback: check label for desktop cameras or assume requested mode
          const label = videoTrack.label ? videoTrack.label.toLowerCase() : ''
          if (label.includes('front') || label.includes('facetime') || label.includes('macbook')) {
            isMirrored.value = true
          } else {
            isMirrored.value = facingMode.value === 'user'
          }
        }
      }

      if (videoRef.value) {
        videoRef.value.srcObject = mediaStream
        // Resolve only after metadata loads so callers can rely on video dimensions.
        return new Promise((resolve) => {
          videoRef.value.onloadedmetadata = () => {
            videoRef.value.play().catch(e => console.error('Play error', e))
            resolve()
          }
        })
      }
    } catch (err) {
      if (err.name === 'NotAllowedError') {
        error.value = 'Camera permission denied'
      } else if (err.name === 'NotFoundError') {
        error.value = 'No camera found'
      } else {
        error.value = `Camera error: ${err.name}`
      }
      throw err // Let caller know it failed
    }
  }

  // Toggle between front and rear camera; the watcher below restarts the stream.
  const switchCamera = () => {
    facingMode.value = facingMode.value === 'environment' ? 'user' : 'environment'
  }

  watch(facingMode, () => {
    if (stream.value) {
      // Re-start if already running
      startCamera().catch(() => { })
    }
  })

  // Always release the camera when the owning component unmounts.
  onUnmounted(() => {
    stopCamera()
  })

  return {
    stream,
    facingMode,
    hasMultipleCameras,
    isMirrored,
    error,
    checkCameras,
    startCamera,
    stopCamera,
    switchCamera
  }
}

View File

@@ -1,174 +1,174 @@
import { ref, onMounted, onUnmounted } from 'vue'

/**
 * Composable wrapping the native BarcodeDetector API for QR scanning.
 * Runs a requestAnimationFrame loop over the live <video> element and paints
 * detected code outlines on an overlay canvas (object-fit: cover aware).
 *
 * @param {import('vue').Ref<HTMLVideoElement|null>} videoRef - live camera preview element.
 * @param {import('vue').Ref<HTMLCanvasElement|null>} overlayCanvasRef - canvas drawn over the video.
 * @returns {object} {error, isDetecting, startDetection, stopDetection}
 */
export function useQrDetection(videoRef, overlayCanvasRef) {
  let barcodeDetector = null // must be plain variable, NOT a Vue ref (Proxy breaks native private fields)
  const isDetecting = ref(false)
  const error = ref('')
  let scanRafId = null // requestAnimationFrame handle for the scan loop

  // Function to initialize detector (lazy; sets `error` when the API is unavailable)
  const initDetector = async () => {
    if (!barcodeDetector) {
      if ('BarcodeDetector' in window) {
        try {
          // Formats are optional, but specifying qr_code might be faster
          const formats = await window.BarcodeDetector.getSupportedFormats()
          if (formats.includes('qr_code')) {
            barcodeDetector = new window.BarcodeDetector({ formats: ['qr_code'] })
          } else {
            barcodeDetector = new window.BarcodeDetector()
          }
        } catch (e) {
          // Fallback
          barcodeDetector = new window.BarcodeDetector()
        }
      } else {
        error.value = 'Barcode Detection API not supported on this device/browser.'
      }
    }
  }

  // Draw outlines + corner dots for each detected code, mapping video-pixel
  // coordinates onto the CSS-sized overlay canvas (object-fit: cover math).
  const paintDetections = (codes) => {
    const canvas = overlayCanvasRef.value
    const video = videoRef.value
    if (!canvas || !video) return

    const ctx = canvas.getContext('2d')
    const { width, height } = canvas.getBoundingClientRect()

    // Update canvas size if needed (to match CSS size)
    if (canvas.width !== width || canvas.height !== height) {
      canvas.width = width
      canvas.height = height
    }

    ctx.clearRect(0, 0, width, height)
    if (!codes || codes.length === 0) return

    const vw = video.videoWidth
    const vh = video.videoHeight
    if (!vw || !vh) return

    // Calculate object-fit: cover scaling
    const videoRatio = vw / vh
    const canvasRatio = width / height
    let drawWidth, drawHeight, startX, startY

    if (canvasRatio > videoRatio) {
      // Canvas is wider than video (video cropped top/bottom)
      drawWidth = width
      drawHeight = width / videoRatio
      startX = 0
      startY = (height - drawHeight) / 2
    } else {
      // Canvas is taller than video (video cropped left/right)
      drawHeight = height
      drawWidth = height * videoRatio
      startY = 0
      startX = (width - drawWidth) / 2
    }

    const scale = drawWidth / vw

    // Styles
    const styles = getComputedStyle(document.documentElement)
    const accent = styles.getPropertyValue('--primary-accent').trim() || '#00f2fe'
    ctx.lineWidth = 4
    ctx.strokeStyle = accent
    ctx.fillStyle = accent

    codes.forEach(code => {
      const points = code.cornerPoints
      if (!points || points.length < 4) return

      ctx.beginPath()
      const transform = (p) => {
        let x = p.x * scale + startX
        let y = p.y * scale + startY
        return { x, y }
      }

      const p0 = transform(points[0])
      ctx.moveTo(p0.x, p0.y)
      for (let i = 1; i < points.length; i++) {
        const p = transform(points[i])
        ctx.lineTo(p.x, p.y)
      }
      ctx.closePath()
      ctx.stroke()

      // Draw corners
      points.forEach(p => {
        const tp = transform(p)
        ctx.beginPath()
        ctx.arc(tp.x, tp.y, 4, 0, Math.PI * 2)
        ctx.fill()
      })
    })
  }

  /**
   * Start the detection loop. `onDetectCallback(codes)` fires on every frame
   * that contains at least one detected code.
   */
  const startDetection = async (onDetectCallback) => {
    error.value = ''
    try {
      await initDetector()
      if (!barcodeDetector) {
        if (!error.value) error.value = 'Barcode Detector failed to initialize'
        return
      }
      isDetecting.value = true

      const detectLoop = async () => {
        const video = videoRef.value
        if (!isDetecting.value) return
        // readyState < 2 means no current frame data yet — wait for the next frame.
        if (!video || video.readyState < 2) {
          scanRafId = requestAnimationFrame(detectLoop)
          return
        }
        try {
          const codes = await barcodeDetector.detect(video)
          paintDetections(codes)
          if (codes.length > 0 && onDetectCallback) {
            onDetectCallback(codes)
          }
        } catch (e) {
          // Silent catch for intermittent detection frames failing
        }
        if (isDetecting.value) {
          scanRafId = requestAnimationFrame(detectLoop)
        }
      }
      detectLoop() // start loop
    } catch (e) {
      error.value = `Detection error: ${e.message}`
    }
  }

  // Stop the loop, cancel the pending frame, and wipe the overlay.
  const stopDetection = () => {
    isDetecting.value = false
    if (scanRafId) cancelAnimationFrame(scanRafId)
    // Clear canvas
    if (overlayCanvasRef.value) {
      const ctx = overlayCanvasRef.value.getContext('2d')
      ctx.clearRect(0, 0, overlayCanvasRef.value.width, overlayCanvasRef.value.height)
    }
  }

  onUnmounted(() => {
    stopDetection()
  })

  return {
    error,
    isDetecting,
    startDetection,
    stopDetection
  }
}

View File

@@ -44,6 +44,7 @@
--ripple-color: rgba(255, 255, 255, 0.3); --ripple-color: rgba(255, 255, 255, 0.3);
--nav-item-weight: 400; --nav-item-weight: 400;
--list-hover-bg: rgba(255, 255, 255, 0.05); --list-hover-bg: rgba(255, 255, 255, 0.05);
--list-border: rgba(255, 255, 255, 0.12);
--header-bg: rgba(0, 0, 0, 0.6); --header-bg: rgba(0, 0, 0, 0.6);
color: var(--text-color); color: var(--text-color);
@@ -61,7 +62,7 @@
:root[data-theme="light"] { :root[data-theme="light"] {
--bg-gradient: radial-gradient(circle at center, #f8fafc 0%, #e2e8f0 100%); --bg-gradient: radial-gradient(circle at center, #f8fafc 0%, #e2e8f0 100%);
--glass-bg: rgba(255, 255, 255, 0.85); --glass-bg: rgba(255, 255, 255, 0.85);
--glass-border: rgba(255, 255, 255, 0.8); --glass-border: rgba(15, 23, 42, 0.12);
--glass-shadow: 0 8px 32px 0 rgba(30, 41, 59, 0.15); --glass-shadow: 0 8px 32px 0 rgba(30, 41, 59, 0.15);
--text-color: #0f172a; --text-color: #0f172a;
--text-strong: #020617; --text-strong: #020617;
@@ -88,7 +89,8 @@
--button-active-shadow: 0 0 18px rgba(14, 165, 233, 0.25); --button-active-shadow: 0 0 18px rgba(14, 165, 233, 0.25);
--title-gradient: linear-gradient(45deg, #0ea5e9, #6366f1); --title-gradient: linear-gradient(45deg, #0ea5e9, #6366f1);
--ripple-color: rgba(0, 0, 0, 0.1); --ripple-color: rgba(0, 0, 0, 0.1);
--list-hover-bg: rgba(255, 255, 255, 0.5); --list-hover-bg: rgba(15, 23, 42, 0.05);
--list-border: rgba(15, 23, 42, 0.08);
--header-bg: rgba(255, 255, 255, 0.6); --header-bg: rgba(255, 255, 255, 0.6);
} }
@@ -580,6 +582,14 @@ textarea:focus,
color: var(--text-strong); color: var(--text-strong);
} }
.history-actions,
.results-actions,
.header-actions {
display: flex;
gap: 0.5rem;
align-items: center;
}
.history-list, .history-list,
.codes-list { .codes-list {
flex: 1; flex: 1;
@@ -593,7 +603,7 @@ textarea:focus,
justify-content: space-between; justify-content: space-between;
align-items: center; align-items: center;
padding: 1rem; padding: 1rem;
border-bottom: 1px solid var(--glass-border); border-bottom: 1px solid var(--list-border);
transition: background 0.2s; transition: background 0.2s;
} }