pulumi/pkg/workspace/paths.go

// Copyright 2016 Pulumi, Inc. All rights reserved.

package workspace

import (
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"

	"github.com/pulumi/coconut/pkg/compiler/errors"
	"github.com/pulumi/coconut/pkg/diag"
	"github.com/pulumi/coconut/pkg/encoding"
	"github.com/pulumi/coconut/pkg/tokens"
)
const (
	Nutfile         = "Nut"     // the base name of a Nutfile.
	Nutpack         = "Nutpack" // the base name of a compiled Nutpack.
	NutpackOutDir   = "nutpack" // the default name of the Nutpack output directory.
	NutpackBinDir   = "bin"     // the default name of the Nutpack binary output directory.
	NutpackHusksDir = "husks"   // the default name of the Nutpack husks directory.
	Nutspace        = "Coconut" // the base name of a markup file for shared settings in a workspace.
	Nutdeps         = ".Nuts"   // the directory in which dependencies exist, either local or global.

	InstallRootEnvvar  = "COCOROOT"            // the envvar describing where Coconut has been installed.
	InstallRootLibdir  = "lib"                 // the directory in which the Coconut standard library exists.
	DefaultInstallRoot = "/usr/local/coconut"  // where Coconut is installed by default.
)

// InstallRoot returns Coconut's installation location. This is controlled by the COCOROOT envvar.
func InstallRoot() string {
	// TODO: support Windows.
	root := os.Getenv(InstallRootEnvvar)
	if root == "" {
		return DefaultInstallRoot
	}
	return root
}
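
// Illustration (not from the original file): with COCOROOT unset, InstallRoot() returns
// "/usr/local/coconut"; with COCOROOT=/opt/coconut exported in the environment, it returns "/opt/coconut".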

// HuskPath returns a path to the given husk's default location.
func HuskPath(husk tokens.QName) string {
	path := filepath.Join(NutpackOutDir, NutpackHusksDir)
	if husk != "" {
		path = filepath.Join(path, qnamePath(husk)+encoding.Exts[0])
	}
	return path
}
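
// Usage sketch (illustrative, not from the original file): assuming qnamePath maps QName separators onto
// path separators and that encoding.Exts[0] is ".json", a call such as
//
//	path := HuskPath(tokens.QName("prod"))
//
// would yield something like "nutpack/husks/prod.json", while HuskPath("") returns just "nutpack/husks".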

// isTop returns true if the path represents the top of the filesystem.
func isTop(path string) bool {
	return os.IsPathSeparator(path[len(path)-1])
}

// pathDir returns the nearest directory to the given path (identity if a directory; parent otherwise).
func pathDir(path string) string {
	// It's possible that the path is a file (e.g., a Nut.yaml file); if so, we want the directory.
	info, err := os.Stat(path)
	if err != nil || info.IsDir() {
		return path
	}
	return filepath.Dir(path)
}
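
// For instance (paths are illustrative only): given an existing file /work/proj/Nut.yaml, pathDir returns
// its parent "/work/proj"; given the existing directory /work/proj itself, or a path that cannot be
// stat'ed, the path is returned unchanged.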

// DetectPackage locates the closest package from the given path, searching "upwards" in the directory hierarchy. If no
// Nutfile is found, an empty path is returned. If problems are detected, they are logged to the diag.Sink.
func DetectPackage(path string, d diag.Sink) (string, error) {
	// It's possible the target is already the file we seek; if so, return right away.
	if IsNutfile(path, d) {
		return path, nil
	}

	curr := pathDir(path)
	for {
		stop := false

		// Enumerate the current path's files, checking each to see if it's a Nutfile.
		files, err := ioutil.ReadDir(curr)
		if err != nil {
			return "", err
		}

		// See if there's a compiled Nutpack in the expected location.
		pack := filepath.Join(NutpackOutDir, NutpackBinDir, Nutpack)
		for _, ext := range encoding.Exts {
			packfile := pack + ext
			if IsNutpack(packfile, d) {
				return packfile, nil
			}
		}

		// Now look for individual Nutfiles.
		for _, file := range files {
			name := file.Name()
			path := filepath.Join(curr, name)
			if IsNutfile(path, d) {
				return path, nil
			} else if IsNutspace(path, d) {
				// If we hit a Nutspace file, stop looking.
				stop = true
			}
		}

		// If we encountered a stop condition, break out of the loop.
		if stop {
			break
		}

		// If neither succeeded, keep looking in our parent directory.
		curr = filepath.Dir(curr)
		if isTop(curr) {
			break
		}
	}
	return "", nil
}
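
// detectPackageExample is a hypothetical, unused sketch -- not part of the original API -- showing how a
// caller might consume DetectPackage. A non-nil error means directory enumeration failed; an empty path
// means no Nutfile or Nutpack was found before hitting the filesystem root; otherwise the path points at
// the discovered file. A nil diag.Sink simply suppresses the casing/extension warnings.
func detectPackageExample(d diag.Sink) (string, error) {
	pkg, err := DetectPackage(".", d)
	if err != nil {
		return "", err // I/O failure while enumerating a directory on the way up
	}
	if pkg == "" {
		return "", nil // nothing found; callers typically surface a "no package found" error here
	}
	return pkg, nil // pkg names either a Nutfile or a compiled Nutpack
}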

// IsNutfile returns true if the path references what appears to be a valid Nutfile. If problems are detected -- like
// an incorrect extension -- they are logged to the provided diag.Sink (if non-nil).
func IsNutfile(path string, d diag.Sink) bool {
	return isMarkupFile(path, Nutfile, d)
}

// IsNutpack returns true if the path references what appears to be a valid Nutpack. If problems are detected -- like
// an incorrect extension -- they are logged to the provided diag.Sink (if non-nil).
func IsNutpack(path string, d diag.Sink) bool {
	return isMarkupFile(path, Nutpack, d)
}

// IsNutspace returns true if the path references what appears to be a valid Nutspace file. If problems are detected --
// like an incorrect extension -- they are logged to the provided diag.Sink (if non-nil).
func IsNutspace(path string, d diag.Sink) bool {
	return isMarkupFile(path, Nutspace, d)
}

// isMarkupFile returns true if the path names an existing file whose base name matches expect and whose extension is
// one of the supported markup encodings. Near misses -- a casing mismatch or an unsupported extension -- trigger a
// warning on the diag.Sink (if non-nil).
func isMarkupFile(path string, expect string, d diag.Sink) bool {
	info, err := os.Stat(path)
	if err != nil || info.IsDir() {
		// Missing files and directories can't be markup files.
		return false
	}

	// Ensure the base name is expected.
	name := info.Name()
	ext := filepath.Ext(name)
	base := strings.TrimSuffix(name, ext)
	if base != expect {
		if d != nil && strings.EqualFold(base, expect) {
			// If the strings aren't equal, but case-insensitively match, issue a warning.
			d.Warningf(errors.WarningIllegalMarkupFileCasing.AtFile(name), expect)
		}
		return false
	}

	// Check all supported extensions.
	for _, mext := range encoding.Exts {
		if name == expect+mext {
			return true
		}
	}

	// If we got here, it means the base name matched, but not the extension. Warn and return.
	if d != nil {
		d.Warningf(errors.WarningIllegalMarkupFileExt.AtFile(name), expect, ext)
	}
	return false
}
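
// For illustration (assuming encoding.Exts includes ".json" and ".yaml"): with expect == "Nut", a file
// named "Nut.yaml" matches; "nut.yaml" is rejected with a casing warning; and "Nut.txt" is rejected with
// an extension warning, since the base name matches but ".txt" is not a supported markup extension.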