<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="en">
< context >
< name > AddCollectionView < / name >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "45" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "45" / >
< source > ← Existing Collections < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "68" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "68" / >
< source > Add Document Collection < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "78" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "78" / >
<source>Add a folder containing plain text files, PDFs, or Markdown. Configure additional extensions in Settings.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "94" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "94" / >
< source > Please choose a directory < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "106" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "106" / >
< source > Name < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "121" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "121" / >
<source>Collection name...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "123" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "123" / >
<source>Name of the collection to add (Required)</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "139" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "139" / >
< source > Folder < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "156" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "156" / >
<source>Folder path...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "159" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "159" / >
<source>Folder path to documents (Required)</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "171" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "171" / >
< source > Browse < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddCollectionView.qml" line = "184" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddCollectionView.qml" line = "184" / >
< source > Create Collection < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > AddModelView < / name >
< message >
< location filename = "../qml/AddModelView.qml" line = "51" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "51" / >
< source > ← Existing Models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "71" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "71" / >
< source > Explore Models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "88" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "88" / >
<source>Discover and download models by keyword search...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "91" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "91" / >
< source > Text field for discovering and filtering downloadable models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "167" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "167" / >
< source > Initiate model discovery and filtering < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "168" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "168" / >
< source > Triggers discovery and filtering of models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "186" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "186" / >
< source > Default < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "186" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "186" / >
< source > Likes < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "186" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "186" / >
< source > Downloads < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "186" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "186" / >
< source > Recent < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "206" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "206" / >
< source > Asc < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "206" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "206" / >
< source > Desc < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "234" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "234" / >
< source > None < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "97" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "97" / >
<source>Searching · %1</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "193" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "193" / >
<source>Sort by: %1</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "218" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "218" / >
<source>Sort dir: %1</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "254" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "254" / >
<source>Limit: %1</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "287" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "287" / >
<source>Network error: could not retrieve %1</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "297" / >
< location filename = "../qml/AddModelView.qml" line = "560" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "297" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "560" / >
< source > Busy indicator < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "298" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "298" / >
< source > Displayed when the models request is ongoing < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "338" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "338" / >
< source > Model file < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "339" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "339" / >
< source > Model file to be downloaded < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "362" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "362" / >
< source > Description < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "363" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "363" / >
< source > File description < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "396" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "396" / >
< source > Cancel < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "396" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "396" / >
< source > Resume < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "396" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "396" / >
< source > Download < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "404" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "404" / >
<source>Stop/restart/start the download</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "416" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "416" / >
< source > Remove < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "423" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "423" / >
< source > Remove model from filesystem < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "437" / >
< location filename = "../qml/AddModelView.qml" line = "446" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "437" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "446" / >
< source > Install < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "447" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "447" / >
< source > Install online model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "476" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "476" / >
<source>&lt;strong&gt;&lt;font size=&quot;2&quot;&gt;WARNING: Not recommended for your hardware. Model requires more memory (%1 GB) than your system has available (%2).&lt;/strong&gt;&lt;/font&gt;</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "628" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "628" / >
<source>%1 GB</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "628" / >
< location filename = "../qml/AddModelView.qml" line = "650" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "628" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "650" / >
< source > ? < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "463" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "463" / >
< source > Describes an error that occurred when downloading < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "457" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "457" / >
<source>&lt;strong&gt;&lt;font size=&quot;1&quot;&gt;&lt;a href=&quot;#error&quot;&gt;Error&lt;/a&gt;&lt;/strong&gt;&lt;/font&gt;</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "482" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "482" / >
< source > Error for incompatible hardware < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "520" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "520" / >
< source > Download progressBar < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "521" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "521" / >
< source > Shows the progress made in the download < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "531" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "531" / >
< source > Download speed < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "532" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "532" / >
<source>Download speed in bytes/kilobytes/megabytes per second</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "549" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "549" / >
<source>Calculating...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "553" / >
< location filename = "../qml/AddModelView.qml" line = "582" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "553" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "582" / >
< source > Whether the file hash is being calculated < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "561" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "561" / >
< source > Displayed when the file hash is being calculated < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "579" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "579" / >
< source > enter $API_KEY < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "601" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "601" / >
< source > File size < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "623" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "623" / >
< source > RAM required < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "645" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "645" / >
< source > Parameters < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "667" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "667" / >
< source > Quant < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/AddModelView.qml" line = "689" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/AddModelView.qml" line = "689" / >
< source > Type < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ApplicationSettings < / name >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "16" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "16" / >
< source > Application < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "25" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "25" / >
< source > Network dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "26" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "26" / >
<source>opt-in to share feedback/conversations</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "37" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "37" / >
<source>ERROR: Update system could not find the MaintenanceTool used&lt;br&gt;
to check for updates!&lt;br&gt;&lt;br&gt;
Did you install this application using the online installer? If so,&lt;br&gt;
the MaintenanceTool executable should be located one directory&lt;br&gt;
above where this application resides on your filesystem.&lt;br&gt;&lt;br&gt;
If you can&apos;t start it manually, then I&apos;m afraid you&apos;ll have to&lt;br&gt;
reinstall.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "48" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "48" / >
< source > Error dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "72" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "72" / >
< source > Application Settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "85" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "85" / >
< source > General < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "97" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "97" / >
< source > Theme < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "98" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "98" / >
<source>The application color scheme.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "110" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "110" / >
< source > Dark < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "110" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "110" / >
< source > Light < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "110" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "110" / >
< source > LegacyDark < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "131" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "131" / >
< source > Font Size < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "132" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "132" / >
<source>The size of text in the application.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "166" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "166" / >
< source > Language and Locale < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "167" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "167" / >
<source>The language and locale you wish to use.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "195" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "195" / >
< source > Device < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "196" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "196" / >
<source>The compute device used for text generation. &quot;Auto&quot; uses Vulkan or Metal.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "229" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "229" / >
< source > Default Model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "230" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "230" / >
<source>The preferred model for new chats. Also used as the local server fallback.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "262" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "262" / >
< source > Suggestion Mode < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "263" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "263" / >
<source>Generate suggested follow-up questions at the end of responses.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "274" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "274" / >
< source > When chatting with LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "274" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "274" / >
< source > Whenever possible < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "274" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "274" / >
< source > Never < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "286" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "286" / >
< source > Download Path < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "287" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "287" / >
<source>Where to store local models and the LocalDocs database.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "316" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "316" / >
< source > Browse < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "317" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "317" / >
< source > Choose where to save model files < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "328" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "328" / >
< source > Enable Datalake < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "329" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "329" / >
<source>Send chats and feedback to the GPT4All Open-Source Datalake.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "362" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "362" / >
< source > Advanced < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "374" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "374" / >
< source > CPU Threads < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "375" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "375" / >
<source>The number of CPU threads used for inference and embedding.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "406" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "406" / >
< source > Save Chat Context < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "407" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "407" / >
<source>Save the chat model&apos;s state to disk for faster loading. WARNING: Uses ~2GB per chat.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "423" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "423" / >
< source > Enable Local Server < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "424" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "424" / >
<source>Expose an OpenAI-Compatible server to localhost. WARNING: Results in increased resource usage.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "440" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "440" / >
< source > API Server Port < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "441" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "441" / >
<source>The port to use for the local server. Requires restart.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "493" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "493" / >
< source > Check For Updates < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "494" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "494" / >
<source>Manually check for an update to GPT4All.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ApplicationSettings.qml" line = "503" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ApplicationSettings.qml" line = "503" / >
< source > Updates < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > Chat < / name >
< message >
< location filename = "../chat.h" line = "72" / >
< location filename = "../chat.cpp" line = "25" / >
< source > New Chat < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../chat.cpp" line = "38" / >
< source > Server Chat < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ChatDrawer < / name >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "37" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "37" / >
< source > Drawer < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "38" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "38" / >
< source > Main navigation drawer < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "49" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "49" / >
< source > + New Chat < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "50" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "50" / >
< source > Create a new chat < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "199" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "199" / >
< source > Select the current chat or edit the chat when in edit mode < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "216" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "216" / >
< source > Edit chat name < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "229" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "229" / >
< source > Save chat name < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "246" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "246" / >
< source > Delete chat < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "283" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "283" / >
< source > Confirm chat deletion < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "305" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "305" / >
< source > Cancel chat deletion < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "317" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "317" / >
< source > List of chats < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatDrawer.qml" line = "318" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatDrawer.qml" line = "318" / >
< source > List of chats in the drawer dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ChatListModel < / name >
< message >
< location filename = "../chatlistmodel.h" line = "86" / >
< source > TODAY < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../chatlistmodel.h" line = "88" / >
< source > THIS WEEK < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../chatlistmodel.h" line = "90" / >
< source > THIS MONTH < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../chatlistmodel.h" line = "92" / >
< source > LAST SIX MONTHS < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../chatlistmodel.h" line = "94" / >
< source > THIS YEAR < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../chatlistmodel.h" line = "96" / >
< source > LAST YEAR < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ChatView < / name >
< message >
< location filename = "../qml/ChatView.qml" line = "77" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "77" / >
<source>&lt;h3&gt;Warning&lt;/h3&gt;&lt;p&gt;%1&lt;/p&gt;</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "86" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "86" / >
< source > Switch model dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "87" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "87" / >
<source>Warn the user if they switch models, then context will be erased</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "94" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "94" / >
<source>Conversation copied to clipboard.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "101" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "101" / >
<source>Code copied to clipboard.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "231" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "231" / >
< source > Chat panel < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "232" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "232" / >
< source > Chat panel with options < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "339" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "339" / >
< source > Reload the currently loaded model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "353" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "353" / >
< source > Eject the currently loaded model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "365" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "365" / >
<source>No model installed.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "367" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "367" / >
<source>Model loading error.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "369" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "369" / >
<source>Waiting for model...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "371" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "371" / >
<source>Switching context...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "373" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "373" / >
<source>Choose a model...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "375" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "375" / >
<source>Not found: %1</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "463" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "463" / >
< source > The top item is the current model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "549" / >
< location filename = "../qml/ChatView.qml" line = "1307" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "549" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1307" / >
< source > LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "567" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "567" / >
< source > Add documents < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "568" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "568" / >
< source > add collections of documents to the chat < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "732" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "732" / >
< source > Load the default model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "733" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "733" / >
< source > Loads the default model which can be changed in settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "744" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "744" / >
< source > No Model Installed < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "753" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "753" / >
< source > GPT4All requires that you install at least one
model to get started < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "765" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "765" / >
< source > Install a Model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "770" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "770" / >
< source > Shows the add model view < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "795" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "795" / >
< source > Conversation with the model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "796" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "796" / >
<source>prompt/response pairs from the conversation</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "848" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "848" / >
< source > GPT4All < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "848" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "848" / >
< source > You < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "870" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "870" / >
<source>recalculating context...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "872" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "872" / >
<source>response stopped...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "875" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "875" / >
<source>processing...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "876" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "876" / >
<source>generating response...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "877" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "877" / >
<source>generating questions...</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "943" / >
< location filename = "../qml/ChatView.qml" line = "1899" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "943" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1899" / >
< source > Copy < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "949" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "949" / >
< source > Copy Message < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "959" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "959" / >
< source > Disable markdown < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "959" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "959" / >
< source > Enable markdown < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1049" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1049" / >
< source > Thumbs up < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1050" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1050" / >
< source > Gives a thumbs up to the response < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1083" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1083" / >
< source > Thumbs down < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1084" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1084" / >
< source > Opens thumbs down dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1139" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1139" / >
<source>%1 Sources</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1383" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1383" / >
<source>Suggested follow-ups</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1659" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1659" / >
< source > Erase and reset chat session < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1680" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1680" / >
< source > Copy chat session to clipboard < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1706" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1706" / >
< source > Redo last chat response < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1955" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1955" / >
< source > Stop generating < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1956" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1956" / >
< source > Stop the current response generation < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1771" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1771" / >
< source > Reloads the model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "58" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "58" / >
< source > & lt ; h3 & gt ; Encountered an error loading model : & lt ; /h3><br><i>"%1"</i & gt ; & lt ; br & gt ; & lt ; br & gt ; Model loading failures can happen for a variety of reasons , but the most common causes include a bad file format , an incomplete or corrupted download , the wrong file type , not enough system RAM or an incompatible model type . Here are some suggestions for resolving the problem : & lt ; br & gt ; & lt ; ul & gt ; & lt ; li & gt ; Ensure the model file has a compatible format and type & lt ; li & gt ; Check the model file is complete in the download folder & lt ; li & gt ; You can find the download folder in the settings dialog & lt ; li & gt ; If you & apos ; ve sideloaded the model ensure the file is not corrupt by checking md5sum & lt ; li & gt ; Read more about what models are supported in our & lt ; a href = & quot ; https : //docs.gpt4all.io/">documentation</a> for the gui<li>Check out our <a href="https://discord.gg/4M2QFmTt2k">discord channel</a> for help</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "377" / >
< location filename = "../qml/ChatView.qml" line = "1769" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "377" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1769" / >
< source > Reload · % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "379" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "379" / >
< source > Loading · % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "708" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "708" / >
< source > Load · % 1 ( default ) → < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "873" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "873" / >
< source > retrieving localdocs : % 1 . . . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "874" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "874" / >
< source > searching localdocs : % 1 . . . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1845" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1845" / >
< source > Send a message . . . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1845" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1845" / >
< source > Load a model to continue . . . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1848" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1848" / >
< source > Send messages / prompts to the model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1893" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1893" / >
< source > Cut < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1905" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1905" / >
< source > Paste < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1909" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1909" / >
< source > Select All < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1979" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1979" / >
< source > Send message < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ChatView.qml" line = "1980" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ChatView.qml" line = "1980" / >
< source > Sends the message / prompt contained in textfield to the model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > CollectionsDrawer < / name >
< message >
< location filename = "../qml/CollectionsDrawer.qml" line = "72" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/CollectionsDrawer.qml" line = "72" / >
< source > Warning : searching collections while indexing can return incomplete results < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message numerus = "yes" >
< location filename = "../qml/CollectionsDrawer.qml" line = "89" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/CollectionsDrawer.qml" line = "89" / >
< source > % n file ( s ) < / source >
< translation type = "unfinished" >
< numerusform > < / numerusform >
< numerusform > < / numerusform >
< / translation >
< / message >
< message numerus = "yes" >
< location filename = "../qml/CollectionsDrawer.qml" line = "89" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/CollectionsDrawer.qml" line = "89" / >
< source > % n word ( s ) < / source >
< translation type = "unfinished" >
< numerusform > < / numerusform >
< numerusform > < / numerusform >
< / translation >
< / message >
< message >
< location filename = "../qml/CollectionsDrawer.qml" line = "105" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/CollectionsDrawer.qml" line = "105" / >
< source > Updating < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/CollectionsDrawer.qml" line = "130" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/CollectionsDrawer.qml" line = "130" / >
< source > + Add Docs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/CollectionsDrawer.qml" line = "139" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/CollectionsDrawer.qml" line = "139" / >
< source > Select a collection to make it available to the chat model . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > HomeView < / name >
< message >
< location filename = "../qml/HomeView.qml" line = "49" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "49" / >
< source > Welcome to GPT4All < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "56" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "56" / >
< source > The privacy - first LLM chat application < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "66" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "66" / >
< source > Start chatting < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "81" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "81" / >
< source > Start Chatting < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "82" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "82" / >
< source > Chat with any LLM < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "92" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "92" / >
< source > LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "93" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "93" / >
< source > Chat with your local files < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "103" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "103" / >
< source > Find Models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "104" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "104" / >
< source > Explore and download models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "190" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "190" / >
< source > Latest news < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "191" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "191" / >
< source > Latest news from GPT4All < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "222" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "222" / >
< source > Release Notes < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "228" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "228" / >
< source > Documentation < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "234" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "234" / >
< source > Discord < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "240" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "240" / >
< source > X ( Twitter ) < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "246" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "246" / >
< source > Github < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "257" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "257" / >
< source > GPT4All . io < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/HomeView.qml" line = "282" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/HomeView.qml" line = "282" / >
< source > Subscribe to Newsletter < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > LocalDocsSettings < / name >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "19" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "19" / >
< source > LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "29" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "29" / >
< source > LocalDocs Settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "38" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "38" / >
< source > Indexing < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "51" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "51" / >
< source > Allowed File Extensions < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "52" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "52" / >
< source > Comma - separated list . LocalDocs will only attempt to process files with these extensions . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "100" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "100" / >
< source > Embedding < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "112" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "112" / >
< source > Use Nomic Embed API < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "113" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "113" / >
< source > Embed documents using the fast Nomic API instead of a private local model . Requires restart . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "130" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "130" / >
< source > Nomic API Key < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "131" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "131" / >
< source > API key to use for Nomic Embed . Get one from the Atlas & lt ; a href = & quot ; https : //atlas.nomic.ai/cli-login">API keys page</a>. Requires restart.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "165" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "165" / >
< source > Embeddings Device < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "166" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "166" / >
< source > The compute device used for embeddings . & quot ; Auto & quot ; uses the CPU . Requires restart . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "202" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "202" / >
< source > Display < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "215" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "215" / >
< source > Show Sources < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "216" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "216" / >
< source > Display the sources used for each response . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "233" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "233" / >
< source > Advanced < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "249" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "249" / >
< source > Warning : Advanced usage only . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "250" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "250" / >
< source > Values too large may cause localdocs failure , extremely slow responses or failure to respond at all . Roughly speaking , the { N chars x N snippets } are added to the model & apos ; s context window . More info & lt ; a href = & quot ; https : //docs.gpt4all.io/gpt4all_desktop/localdocs.html">here</a>.</source>
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "258" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "258" / >
< source > Document snippet size ( characters ) < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "259" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "259" / >
< source > Number of characters per document snippet . Larger numbers increase likelihood of factual responses , but also result in slower generation . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "284" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "284" / >
< source > Max document snippets per prompt < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsSettings.qml" line = "285" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsSettings.qml" line = "285" / >
< source > Max best N matches of retrieved document snippets to add to the context for prompt . Larger numbers increase likelihood of factual responses , but also result in slower generation . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > LocalDocsView < / name >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "52" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "52" / >
< source > LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "58" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "58" / >
< source > Chat with your local files < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "71" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "71" / >
< source > + Add Collection < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "86" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "86" / >
< source > ERROR : The LocalDocs database is not valid . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "104" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "104" / >
< source > No Collections Installed < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "113" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "113" / >
< source > Install a collection of local documents to get started using this feature < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "124" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "124" / >
< source > + Add Doc Collection < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "129" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "129" / >
< source > Shows the add model view < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "226" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "226" / >
< source > Indexing progressBar < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "227" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "227" / >
< source > Shows the progress made in the indexing < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "252" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "252" / >
< source > ERROR < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "256" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "256" / >
< source > INDEXING < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "260" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "260" / >
< source > EMBEDDING < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "263" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "263" / >
< source > REQUIRES UPDATE < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "266" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "266" / >
< source > READY < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "268" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "268" / >
< source > INSTALLING < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "295" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "295" / >
< source > Indexing in progress < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "298" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "298" / >
< source > Embedding in progress < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "301" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "301" / >
< source > This collection requires an update after version change < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "304" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "304" / >
< source > Automatically reindexes upon changes to the folder < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "306" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "306" / >
< source > Installation in progress < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "320" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "320" / >
< source > % < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message numerus = "yes" >
< location filename = "../qml/LocalDocsView.qml" line = "332" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "332" / >
< source > % n file ( s ) < / source >
< translation type = "unfinished" >
< numerusform > < / numerusform >
< numerusform > < / numerusform >
< / translation >
< / message >
< message numerus = "yes" >
< location filename = "../qml/LocalDocsView.qml" line = "332" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "332" / >
< source > % n word ( s ) < / source >
< translation type = "unfinished" >
< numerusform > < / numerusform >
< numerusform > < / numerusform >
< / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "403" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "403" / >
< source > Remove < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "415" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "415" / >
< source > Rebuild < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "418" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "418" / >
< source > Reindex this folder from scratch . This is slow and usually not needed . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "425" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "425" / >
< source > Update < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/LocalDocsView.qml" line = "428" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/LocalDocsView.qml" line = "428" / >
< source > Update the collection to the new version . This is a slow operation . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ModelList < / name >
< message >
< location filename = "../modellist.cpp" line = "1537" / >
< source > & lt ; ul & gt ; & lt ; li & gt ; Requires personal OpenAI API key . & lt ; /li & gt ; & lt ; li & gt ; WARNING : Will send your chats to OpenAI ! & lt ; /li & gt ; & lt ; li & gt ; Your API key will be stored on disk & lt ; /li & gt ; & lt ; li & gt ; Will only be used to communicate with OpenAI & lt ; /li & gt ; & lt ; li & gt ; You can apply for an API key & lt ; a href = & quot ; https : //platform.openai.com/account/api-keys & quot ; & gt ; here . & lt ; /a & gt ; & lt ; /li & gt ; < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1556" / >
< source > & lt ; strong & gt ; OpenAI & apos ; s ChatGPT model GPT - 3.5 Turbo & lt ; /strong & gt ; & lt ; br & gt ; % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1584" / >
< source > & lt ; strong & gt ; OpenAI & apos ; s ChatGPT model GPT - 4 & lt ; /strong & gt ; & lt ; br & gt ; % 1 % 2 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1615" / >
< source > & lt ; strong & gt ; Mistral Tiny model & lt ; /strong & gt ; & lt ; br & gt ; % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1640" / >
< source > & lt ; strong & gt ; Mistral Small model & lt ; /strong & gt ; & lt ; br & gt ; % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1666" / >
< source > & lt ; strong & gt ; Mistral Medium model & lt ; /strong & gt ; & lt ; br & gt ; % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1569" / >
< source > & lt ; br & gt ; & lt ; br & gt ; & lt ; i & gt ; * Even if you pay OpenAI for ChatGPT - 4 this does not guarantee API key access . Contact OpenAI for more info . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "1596" / >
< source > & lt ; ul & gt ; & lt ; li & gt ; Requires personal Mistral API key . & lt ; /li & gt ; & lt ; li & gt ; WARNING : Will send your chats to Mistral ! & lt ; /li & gt ; & lt ; li & gt ; Your API key will be stored on disk & lt ; /li & gt ; & lt ; li & gt ; Will only be used to communicate with Mistral & lt ; /li & gt ; & lt ; li & gt ; You can apply for an API key & lt ; a href = & quot ; https : //console.mistral.ai/user/api-keys & quot ; & gt ; here & lt ; /a & gt ; . & lt ; /li & gt ; < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../modellist.cpp" line = "2081" / >
< source > & lt ; strong & gt ; Created by % 1 . & lt ; /strong & gt ; & lt ; br & gt ; & lt ; ul & gt ; & lt ; li & gt ; Published on % 2 . & lt ; li & gt ; This model has % 3 likes . & lt ; li & gt ; This model has % 4 downloads . & lt ; li & gt ; More info can be found & lt ; a href = & quot ; https : //huggingface.co/%5 & quot ; & gt ; here . & lt ; /a & gt ; & lt ; /ul & gt ; < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ModelSettings < / name >
< message >
< location filename = "../qml/ModelSettings.qml" line = "14" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "14" / >
< source > Model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "33" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "33" / >
< source > Model Settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "83" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "83" / >
< source > Clone < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "93" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "93" / >
< source > Remove < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "107" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "107" / >
< source > Name < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "140" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "140" / >
< source > Model File < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "158" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "158" / >
< source > System Prompt < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "159" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "159" / >
< source > Prefixed at the beginning of every conversation . Must contain the appropriate framing tokens . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "205" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "205" / >
< source > Prompt Template < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "206" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "206" / >
< source > The template that wraps every prompt . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "210" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "210" / >
< source > Must contain the string & quot ; % 1 & quot ; to be replaced with the user & apos ; s input . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "255" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "255" / >
< source > Chat Name Prompt < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "256" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "256" / >
< source > Prompt used to automatically generate chat names . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "283" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "283" / >
< source > Suggested FollowUp Prompt < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "284" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "284" / >
< source > Prompt used to generate suggested follow - up questions . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "322" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "322" / >
< source > Context Length < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "323" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "323" / >
< source > Number of input and output tokens the model sees . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "344" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "344" / >
< source > Maximum combined prompt / response tokens before information is lost .
Using more context than the model was trained on will yield poor results .
NOTE : Does not take effect until you reload the model . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "382" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "382" / >
< source > Temperature < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "383" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "383" / >
< source > Randomness of model output . Higher - & gt ; more variation . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "394" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "394" / >
< source > Temperature increases the chances of choosing less likely tokens .
NOTE : Higher temperature gives more creative but less predictable outputs . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "428" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "428" / >
< source > Top - P < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "429" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "429" / >
< source > Nucleus Sampling factor . Lower - & gt ; more predictable . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "439" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "439" / >
< source > Only the most likely tokens up to a total probability of top_p can be chosen .
NOTE : Prevents choosing highly unlikely tokens . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "473" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "473" / >
< source > Min - P < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "474" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "474" / >
< source > Minimum token probability . Higher - & gt ; more predictable . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "484" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "484" / >
< source > Sets the minimum relative probability for a token to be considered . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "520" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "520" / >
< source > Top - K < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "521" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "521" / >
< source > Size of selection pool for tokens . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "532" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "532" / >
< source > Only the top K most likely tokens will be chosen from . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "567" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "567" / >
< source > Max Length < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "568" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "568" / >
< source > Maximum response length , in tokens . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "613" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "613" / >
< source > Prompt Batch Size < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "614" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "614" / >
< source > The batch size used for prompt processing . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "625" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "625" / >
< source > Amount of prompt tokens to process at once .
NOTE : Higher values can speed up reading prompts but will use more RAM . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "660" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "660" / >
< source > Repeat Penalty < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "661" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "661" / >
< source > Repetition penalty factor . Set to 1 to disable . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "705" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "705" / >
< source > Repeat Penalty Tokens < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "706" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "706" / >
< source > Number of previous tokens used for penalty . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "751" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "751" / >
< source > GPU Layers < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "752" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "752" / >
< source > Number of model layers to load into VRAM . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelSettings.qml" line = "763" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelSettings.qml" line = "763" / >
< source > How many model layers to load into VRAM . Decrease this if GPT4All runs out of VRAM while loading this model .
Lower values increase CPU load and RAM usage , and make inference slower .
NOTE : Does not take effect until you reload the model . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ModelsView < / name >
< message >
< location filename = "../qml/ModelsView.qml" line = "36" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "36" / >
< source > No Models Installed < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "45" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "45" / >
< source > Install a model to get started using GPT4All < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "56" / >
< location filename = "../qml/ModelsView.qml" line = "98" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "56" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "98" / >
< source > + Add Model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "61" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "61" / >
< source > Shows the add model view < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "79" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "79" / >
< source > Installed Models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "85" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "85" / >
< source > Locally installed chat models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "143" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "143" / >
< source > Model file < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "144" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "144" / >
< source > Model file to be downloaded < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "166" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "166" / >
< source > Description < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "167" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "167" / >
< source > File description < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "192" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "192" / >
< source > Cancel < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "192" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "192" / >
< source > Resume < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "200" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "200" / >
< source > Stop / restart / start the download < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "212" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "212" / >
< source > Remove < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "219" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "219" / >
< source > Remove model from filesystem < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "233" / >
< location filename = "../qml/ModelsView.qml" line = "242" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "233" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "242" / >
< source > Install < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "243" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "243" / >
< source > Install online model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "253" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "253" / >
< source > & lt ; strong & gt ; & lt ; font size = & quot ; 1 & quot ; & gt ; & lt ; a href = & quot ; # error & quot ; & gt ; Error & lt ; /a & gt ; & lt ; /strong & gt ; & lt ; /font & gt ; < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "272" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "272" / >
< source > & lt ; strong & gt ; & lt ; font size = & quot ; 2 & quot ; & gt ; WARNING : Not recommended for your hardware . Model requires more memory ( % 1 GB ) than your system has available ( % 2 ) . & lt ; /strong & gt ; & lt ; /font & gt ; < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "424" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "424" / >
< source > % 1 GB < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "424" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "424" / >
< source > ? < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "259" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "259" / >
< source > Describes an error that occurred when downloading < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "278" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "278" / >
< source > Error for incompatible hardware < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "316" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "316" / >
< source > Download progressBar < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "317" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "317" / >
< source > Shows the progress made in the download < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "327" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "327" / >
< source > Download speed < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "328" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "328" / >
< source > Download speed in bytes / kilobytes / megabytes per second < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "345" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "345" / >
< source > Calculating . . . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "349" / >
< location filename = "../qml/ModelsView.qml" line = "378" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "349" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "378" / >
< source > Whether the file hash is being calculated < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "356" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "356" / >
< source > Busy indicator < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "357" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "357" / >
< source > Displayed when the file hash is being calculated < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "375" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "375" / >
< source > enter $API_KEY < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "397" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "397" / >
< source > File size < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "419" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "419" / >
< source > RAM required < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "441" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "441" / >
< source > Parameters < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "463" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "463" / >
< source > Quant < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ModelsView.qml" line = "485" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ModelsView.qml" line = "485" / >
< source > Type < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > MyFancyLink < / name >
< message >
< location filename = "../qml/MyFancyLink.qml" line = "42" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/MyFancyLink.qml" line = "42" / >
< source > Fancy link < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/MyFancyLink.qml" line = "43" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/MyFancyLink.qml" line = "43" / >
< source > A stylized link < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > MySettingsStack < / name >
< message >
< location filename = "../qml/MySettingsStack.qml" line = "66" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/MySettingsStack.qml" line = "66" / >
< source > Please choose a directory < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > MySettingsTab < / name >
< message >
< location filename = "../qml/MySettingsTab.qml" line = "62" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/MySettingsTab.qml" line = "62" / >
< source > Restore Defaults < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/MySettingsTab.qml" line = "66" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/MySettingsTab.qml" line = "66" / >
< source > Restores settings dialog to a default state < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > NetworkDialog < / name >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "39" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "39" / >
< source > Contribute data to the GPT4All Opensource Datalake . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "55" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "55" / >
< source > By enabling this feature , you will be able to participate in the democratic process of training a large language model by contributing data for future model improvements .
When a GPT4All model responds to you and you have opted - in , your conversation will be sent to the GPT4All Open Source Datalake . Additionally , you can like / dislike its response . If you dislike a response , you can suggest an alternative response . This data will be collected and aggregated in the GPT4All Datalake .
NOTE : By turning on this feature , you will be sending your data to the GPT4All Open Source Datalake . You should have no expectation of chat privacy when this feature is enabled . You should , however , have an expectation of an optional attribution if you wish . Your chat data will be openly available for anyone to download and will be used by Nomic AI to improve future GPT4All models . Nomic AI will retain all attribution information attached to your data and you will be credited as a contributor to any GPT4All model release that uses your data ! < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "63" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "63" / >
< source > Terms for opt - in < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "64" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "64" / >
< source > Describes what will happen when you opt - in < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "72" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "72" / >
< source > Please provide a name for attribution ( optional ) < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "74" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "74" / >
< source > Attribution ( optional ) < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "75" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "75" / >
< source > Provide attribution < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "88" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "88" / >
< source > Enable < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "89" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "89" / >
< source > Enable opt - in < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "93" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "93" / >
< source > Cancel < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NetworkDialog.qml" line = "94" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NetworkDialog.qml" line = "94" / >
< source > Cancel opt - in < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > NewVersionDialog < / name >
< message >
< location filename = "../qml/NewVersionDialog.qml" line = "34" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NewVersionDialog.qml" line = "34" / >
< source > New version is available < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NewVersionDialog.qml" line = "46" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NewVersionDialog.qml" line = "46" / >
< source > Update < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/NewVersionDialog.qml" line = "48" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/NewVersionDialog.qml" line = "48" / >
< source > Update to new version < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > PopupDialog < / name >
< message >
< location filename = "../qml/PopupDialog.qml" line = "38" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/PopupDialog.qml" line = "38" / >
< source > Reveals a shortlived help balloon < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/PopupDialog.qml" line = "48" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/PopupDialog.qml" line = "48" / >
< source > Busy indicator < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/PopupDialog.qml" line = "49" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/PopupDialog.qml" line = "49" / >
< source > Displayed when the popup is showing busy < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > SettingsView < / name >
< message >
< location filename = "../qml/SettingsView.qml" line = "22" / >
< location filename = "../qml/SettingsView.qml" line = "61" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SettingsView.qml" line = "22" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SettingsView.qml" line = "61" / >
< source > Settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SettingsView.qml" line = "23" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SettingsView.qml" line = "23" / >
< source > Contains various application settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SettingsView.qml" line = "29" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SettingsView.qml" line = "29" / >
< source > Application < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SettingsView.qml" line = "32" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SettingsView.qml" line = "32" / >
< source > Model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SettingsView.qml" line = "35" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SettingsView.qml" line = "35" / >
< source > LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > StartupDialog < / name >
< message >
< location filename = "../qml/StartupDialog.qml" line = "50" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "50" / >
< source > Welcome ! < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "67" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "67" / >
< source > # # # Release notes
% 1 # # # Contributors
% 2 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "71" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "71" / >
< source > Release notes < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "72" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "72" / >
< source > Release notes for this version < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "87" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "87" / >
< source > # # # Opt - ins for anonymous usage analytics and datalake
By enabling these features , you will be able to participate in the democratic process of training a
large language model by contributing data for future model improvements .
When a GPT4All model responds to you and you have opted - in , your conversation will be sent to the GPT4All
Open Source Datalake . Additionally , you can like / dislike its response . If you dislike a response , you
can suggest an alternative response . This data will be collected and aggregated in the GPT4All Datalake .
NOTE : By turning on this feature , you will be sending your data to the GPT4All Open Source Datalake .
You should have no expectation of chat privacy when this feature is enabled . You should , however , have
an expectation of an optional attribution if you wish . Your chat data will be openly available for anyone
to download and will be used by Nomic AI to improve future GPT4All models . Nomic AI will retain all
attribution information attached to your data and you will be credited as a contributor to any GPT4All
model release that uses your data ! < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "106" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "106" / >
< source > Terms for opt - in < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "107" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "107" / >
< source > Describes what will happen when you opt - in < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "124" / >
< location filename = "../qml/StartupDialog.qml" line = "150" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "124" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "150" / >
< source > Opt - in for anonymous usage statistics < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "147" / >
< location filename = "../qml/StartupDialog.qml" line = "262" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "147" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "262" / >
< source > Yes < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "151" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "151" / >
< source > Allow opt - in for anonymous usage statistics < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "189" / >
< location filename = "../qml/StartupDialog.qml" line = "304" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "189" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "304" / >
< source > No < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "192" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "192" / >
< source > Opt - out for anonymous usage statistics < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "193" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "193" / >
< source > Allow opt - out for anonymous usage statistics < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "238" / >
< location filename = "../qml/StartupDialog.qml" line = "265" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "238" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "265" / >
< source > Opt - in for network < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "239" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "239" / >
< source > Allow opt - in for network < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "266" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "266" / >
< source > Allow opt - in anonymous sharing of chats to the GPT4All Datalake < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "307" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "307" / >
< source > Opt - out for network < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/StartupDialog.qml" line = "308" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/StartupDialog.qml" line = "308" / >
< source > Allow opt - out anonymous sharing of chats to the GPT4All Datalake < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > SwitchModelDialog < / name >
< message >
< location filename = "../qml/SwitchModelDialog.qml" line = "22" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SwitchModelDialog.qml" line = "22" / >
< source > & lt ; b & gt ; Warning : & lt ; / b & gt ; changing the model will erase the current conversation . Do you wish to continue ? < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SwitchModelDialog.qml" line = "33" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SwitchModelDialog.qml" line = "33" / >
< source > Continue < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SwitchModelDialog.qml" line = "34" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SwitchModelDialog.qml" line = "34" / >
< source > Continue with model loading < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/SwitchModelDialog.qml" line = "38" / >
< location filename = "../qml/SwitchModelDialog.qml" line = "39" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SwitchModelDialog.qml" line = "38" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/SwitchModelDialog.qml" line = "39" / >
< source > Cancel < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > ThumbsDownDialog < / name >
< message >
< location filename = "../qml/ThumbsDownDialog.qml" line = "39" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ThumbsDownDialog.qml" line = "39" / >
< source > Please edit the text below to provide a better response . ( optional ) < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ThumbsDownDialog.qml" line = "54" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ThumbsDownDialog.qml" line = "54" / >
< source > Please provide a better response . . . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ThumbsDownDialog.qml" line = "64" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ThumbsDownDialog.qml" line = "64" / >
< source > Submit < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ThumbsDownDialog.qml" line = "65" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ThumbsDownDialog.qml" line = "65" / >
< source > Submits the user & apos ; s response < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ThumbsDownDialog.qml" line = "69" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ThumbsDownDialog.qml" line = "69" / >
< source > Cancel < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../qml/ThumbsDownDialog.qml" line = "70" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/qml/ThumbsDownDialog.qml" line = "70" / >
< source > Closes the response dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< context >
< name > main < / name >
< message >
< location filename = "../main.qml" line = "111" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "111" / >
< source > & lt ; h3 & gt ; Encountered an error starting up : & lt ; / h3 & gt ; & lt ; br & gt ; & lt ; i & gt ; & quot ; Incompatible hardware detected . & quot ; & lt ; / i & gt ; & lt ; br & gt ; & lt ; br & gt ; Unfortunately , your CPU does not meet the minimal requirements to run this program . In particular , it does not support AVX intrinsics which this program requires to successfully run a modern large language model . The only solution at this time is to upgrade your hardware to a more modern CPU . & lt ; br & gt ; & lt ; br & gt ; See here for more information : & lt ; a href = & quot ; https://en.wikipedia.org/wiki/Advanced_Vector_Extensions & quot ; & gt ; https://en.wikipedia.org/wiki/Advanced_Vector_Extensions & lt ; / a & gt ; < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "23" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "23" / >
< source > GPT4All v % 1 < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "127" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "127" / >
< source > & lt ; h3 & gt ; Encountered an error starting up : & lt ; / h3 & gt ; & lt ; br & gt ; & lt ; i & gt ; & quot ; Inability to access settings file . & quot ; & lt ; / i & gt ; & lt ; br & gt ; & lt ; br & gt ; Unfortunately , something is preventing the program from accessing the settings file . This could be caused by incorrect permissions in the local app config directory where the settings file is located . Check out our & lt ; a href = & quot ; https://discord.gg/4M2QFmTt2k & quot ; & gt ; discord channel & lt ; / a & gt ; for help . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "155" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "155" / >
< source > Connection to datalake failed . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "166" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "166" / >
< source > Saving chats . < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "177" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "177" / >
< source > Network dialog < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "178" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "178" / >
< source > opt - in to share feedback / conversations < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "231" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "231" / >
< source > Home view < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "232" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "232" / >
< source > Home view of application < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "240" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "240" / >
< source > Home < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "266" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "266" / >
< source > Chat view < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "267" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "267" / >
< source > Chat view to interact with models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "275" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "275" / >
< source > Chats < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "300" / >
< location filename = "../main.qml" line = "309" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "300" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "309" / >
< source > Models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "301" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "301" / >
< source > Models view for installed models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "334" / >
< location filename = "../main.qml" line = "343" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "334" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "343" / >
< source > LocalDocs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "335" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "335" / >
< source > LocalDocs view to configure and use local docs < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "368" / >
< location filename = "../main.qml" line = "377" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "368" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "377" / >
< source > Settings < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "369" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "369" / >
< source > Settings view for application configuration < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "422" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "422" / >
< source > The datalake is enabled < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "424" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "424" / >
< source > Using a network model < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "426" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "426" / >
< source > Server mode is enabled < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "640" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "640" / >
< source > Installed models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< message >
< location filename = "../main.qml" line = "641" / >
< location filename = "../../build_gpt4all-chat_Desktop_Qt_6_7_2/gpt4all/main.qml" line = "641" / >
< source > View of installed models < / source >
< translation type = "unfinished" > < / translation >
< / message >
< / context >
< / TS >